{
  "best_metric": 4.286992073059082,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/existential-there-quantifier/lstm/2/checkpoints/checkpoint-305280",
  "epoch": 1.0250006060157382,
  "eval_steps": 10,
  "global_step": 305280,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.999998362119627e-05,
      "loss": 10.8206,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.999161405248948e-05,
      "loss": 7.554,
      "step": 512
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.998322810497896e-05,
      "loss": 7.0624,
      "step": 1024
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.997484215746844e-05,
      "loss": 7.0013,
      "step": 1536
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.996645620995792e-05,
      "loss": 6.9543,
      "step": 2048
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99580702624474e-05,
      "loss": 6.9143,
      "step": 2560
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994968431493688e-05,
      "loss": 6.7423,
      "step": 3072
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994129836742636e-05,
      "loss": 6.6408,
      "step": 3584
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.993291241991584e-05,
      "loss": 6.5414,
      "step": 4096
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.992452647240532e-05,
      "loss": 6.4563,
      "step": 4608
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99161405248948e-05,
      "loss": 6.3913,
      "step": 5120
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.990775457738428e-05,
      "loss": 6.3289,
      "step": 5632
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989936862987376e-05,
      "loss": 6.261,
      "step": 6144
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989101543997071e-05,
      "loss": 6.1967,
      "step": 6656
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.988262949246018e-05,
      "loss": 6.1493,
      "step": 7168
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.987424354494966e-05,
      "loss": 6.0967,
      "step": 7680
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.986585759743914e-05,
      "loss": 6.0539,
      "step": 8192
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.985747164992862e-05,
      "loss": 6.0146,
      "step": 8704
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.98490857024181e-05,
      "loss": 5.9802,
      "step": 9216
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984069975490758e-05,
      "loss": 5.9423,
      "step": 9728
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.983231380739706e-05,
      "loss": 5.9104,
      "step": 10240
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9823927859886547e-05,
      "loss": 5.8715,
      "step": 10752
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9815541912376026e-05,
      "loss": 5.8492,
      "step": 11264
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9807155964865506e-05,
      "loss": 5.8079,
      "step": 11776
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9798770017354986e-05,
      "loss": 5.7899,
      "step": 12288
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9790384069844466e-05,
      "loss": 5.7587,
      "step": 12800
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9782014501137675e-05,
      "loss": 5.7358,
      "step": 13312
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9773628553627155e-05,
      "loss": 5.7155,
      "step": 13824
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9765242606116635e-05,
      "loss": 5.6849,
      "step": 14336
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9756856658606115e-05,
      "loss": 5.6593,
      "step": 14848
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9748487089899324e-05,
      "loss": 5.6479,
      "step": 15360
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9740101142388804e-05,
      "loss": 5.6353,
      "step": 15872
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9731715194878284e-05,
      "loss": 5.6146,
      "step": 16384
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9723329247367764e-05,
      "loss": 5.5901,
      "step": 16896
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9714943299857244e-05,
      "loss": 5.5845,
      "step": 17408
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.970657373115046e-05,
      "loss": 5.5581,
      "step": 17920
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.969820416244367e-05,
      "loss": 5.5454,
      "step": 18432
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.968981821493315e-05,
      "loss": 5.5124,
      "step": 18944
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.968143226742263e-05,
      "loss": 5.5149,
      "step": 19456
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.967304631991211e-05,
      "loss": 5.493,
      "step": 19968
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.966466037240159e-05,
      "loss": 5.4842,
      "step": 20480
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.965627442489107e-05,
      "loss": 5.4728,
      "step": 20992
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.964790485618428e-05,
      "loss": 5.4662,
      "step": 21504
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.963951890867376e-05,
      "loss": 5.4354,
      "step": 22016
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.963113296116324e-05,
      "loss": 5.4295,
      "step": 22528
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.962274701365272e-05,
      "loss": 5.4243,
      "step": 23040
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96143610661422e-05,
      "loss": 5.409,
      "step": 23552
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.960597511863168e-05,
      "loss": 5.4109,
      "step": 24064
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.959758917112116e-05,
      "loss": 5.3885,
      "step": 24576
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.958920322361064e-05,
      "loss": 5.3679,
      "step": 25088
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.958083365490385e-05,
      "loss": 5.3762,
      "step": 25600
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.957244770739333e-05,
      "loss": 5.3544,
      "step": 26112
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.956406175988281e-05,
      "loss": 5.3493,
      "step": 26624
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.955567581237229e-05,
      "loss": 5.3211,
      "step": 27136
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.954728986486177e-05,
      "loss": 5.3322,
      "step": 27648
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9538920296154976e-05,
      "loss": 5.3092,
      "step": 28160
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9530534348644456e-05,
      "loss": 5.3357,
      "step": 28672
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9522148401133936e-05,
      "loss": 5.2975,
      "step": 29184
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.951376245362342e-05,
      "loss": 5.2987,
      "step": 29696
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.95053765061129e-05,
      "loss": 5.2902,
      "step": 30208
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.949699055860238e-05,
      "loss": 5.2708,
      "step": 30720
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948862098989559e-05,
      "loss": 5.2577,
      "step": 31232
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948023504238507e-05,
      "loss": 5.2586,
      "step": 31744
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.947184909487455e-05,
      "loss": 5.2407,
      "step": 32256
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.946347952616776e-05,
      "loss": 5.2347,
      "step": 32768
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.945510995746097e-05,
      "loss": 5.2516,
      "step": 33280
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.944672400995045e-05,
      "loss": 5.2333,
      "step": 33792
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.943833806243993e-05,
      "loss": 5.218,
      "step": 34304
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942995211492941e-05,
      "loss": 5.1964,
      "step": 34816
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942156616741889e-05,
      "loss": 5.1919,
      "step": 35328
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.941318021990837e-05,
      "loss": 5.2002,
      "step": 35840
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9404794272397856e-05,
      "loss": 5.1976,
      "step": 36352
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9396408324887336e-05,
      "loss": 5.1903,
      "step": 36864
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9388022377376816e-05,
      "loss": 5.1874,
      "step": 37376
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9379636429866296e-05,
      "loss": 5.1781,
      "step": 37888
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9371250482355776e-05,
      "loss": 5.171,
      "step": 38400
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9362880913648985e-05,
      "loss": 5.1569,
      "step": 38912
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9354494966138465e-05,
      "loss": 5.1484,
      "step": 39424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9346109018627945e-05,
      "loss": 5.1412,
      "step": 39936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9337739449921154e-05,
      "loss": 5.1299,
      "step": 40448
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9329353502410634e-05,
      "loss": 5.137,
      "step": 40960
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9320967554900114e-05,
      "loss": 5.1269,
      "step": 41472
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9312581607389594e-05,
      "loss": 5.13,
      "step": 41984
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9304195659879074e-05,
      "loss": 5.1094,
      "step": 42496
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9295809712368554e-05,
      "loss": 5.0915,
      "step": 43008
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9287423764858034e-05,
      "loss": 5.1058,
      "step": 43520
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9279037817347514e-05,
      "loss": 5.1011,
      "step": 44032
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9270651869836994e-05,
      "loss": 5.0976,
      "step": 44544
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9262265922326474e-05,
      "loss": 5.0828,
      "step": 45056
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9253879974815954e-05,
      "loss": 5.0861,
      "step": 45568
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9245494027305433e-05,
      "loss": 5.0727,
      "step": 46080
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.923712445859864e-05,
      "loss": 5.0668,
      "step": 46592
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.922873851108812e-05,
      "loss": 5.0653,
      "step": 47104
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.92203525635776e-05,
      "loss": 5.0611,
      "step": 47616
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.921196661606708e-05,
      "loss": 5.0627,
      "step": 48128
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.920358066855656e-05,
      "loss": 5.0566,
      "step": 48640
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.919519472104604e-05,
      "loss": 5.0542,
      "step": 49152
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.918680877353552e-05,
      "loss": 5.0321,
      "step": 49664
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.917843920482874e-05,
      "loss": 5.0285,
      "step": 50176
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.917005325731822e-05,
      "loss": 5.0269,
      "step": 50688
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.91616673098077e-05,
      "loss": 5.0268,
      "step": 51200
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.915328136229718e-05,
      "loss": 5.0191,
      "step": 51712
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.914491179359039e-05,
      "loss": 5.012,
      "step": 52224
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.913652584607987e-05,
      "loss": 5.0031,
      "step": 52736
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9128156277373076e-05,
      "loss": 5.0107,
      "step": 53248
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9119770329862556e-05,
      "loss": 4.9842,
      "step": 53760
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9111400761155765e-05,
      "loss": 4.9914,
      "step": 54272
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9103014813645245e-05,
      "loss": 4.9879,
      "step": 54784
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.909462886613473e-05,
      "loss": 4.98,
      "step": 55296
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.908624291862421e-05,
      "loss": 4.9758,
      "step": 55808
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.907785697111369e-05,
      "loss": 4.9802,
      "step": 56320
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.906947102360317e-05,
      "loss": 4.967,
      "step": 56832
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.906108507609265e-05,
      "loss": 4.9692,
      "step": 57344
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.905269912858213e-05,
      "loss": 4.9623,
      "step": 57856
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.904432955987534e-05,
      "loss": 4.9503,
      "step": 58368
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.903594361236482e-05,
      "loss": 4.9566,
      "step": 58880
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.90275576648543e-05,
      "loss": 4.9542,
      "step": 59392
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.901918809614751e-05,
      "loss": 4.9492,
      "step": 59904
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.901080214863699e-05,
      "loss": 4.9355,
      "step": 60416
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.900241620112647e-05,
      "loss": 4.9415,
      "step": 60928
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.899403025361595e-05,
      "loss": 4.9435,
      "step": 61440
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.898564430610543e-05,
      "loss": 4.9307,
      "step": 61952
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.897725835859491e-05,
      "loss": 4.9178,
      "step": 62464
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8968872411084396e-05,
      "loss": 4.9269,
      "step": 62976
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.896048646357387e-05,
      "loss": 4.9257,
      "step": 63488
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.895210051606335e-05,
      "loss": 4.9131,
      "step": 64000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.894371456855283e-05,
      "loss": 4.908,
      "step": 64512
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.893532862104231e-05,
      "loss": 4.8988,
      "step": 65024
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.892695905233552e-05,
      "loss": 4.9032,
      "step": 65536
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8918573104825e-05,
      "loss": 4.8938,
      "step": 66048
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.891018715731448e-05,
      "loss": 4.9078,
      "step": 66560
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.890180120980396e-05,
      "loss": 4.8976,
      "step": 67072
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.889341526229344e-05,
      "loss": 4.8796,
      "step": 67584
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.888502931478292e-05,
      "loss": 4.887,
      "step": 68096
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8876659746076134e-05,
      "loss": 4.885,
      "step": 68608
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8868273798565614e-05,
      "loss": 4.8804,
      "step": 69120
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8859887851055094e-05,
      "loss": 4.884,
      "step": 69632
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8851501903544574e-05,
      "loss": 4.8704,
      "step": 70144
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8843115956034054e-05,
      "loss": 4.873,
      "step": 70656
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8834730008523534e-05,
      "loss": 4.8786,
      "step": 71168
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8826344061013014e-05,
      "loss": 4.8619,
      "step": 71680
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8817958113502494e-05,
      "loss": 4.8579,
      "step": 72192
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8809572165991974e-05,
      "loss": 4.8597,
      "step": 72704
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.880120259728518e-05,
      "loss": 4.8461,
      "step": 73216
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.879281664977466e-05,
      "loss": 4.8452,
      "step": 73728
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.878443070226414e-05,
      "loss": 4.8503,
      "step": 74240
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.877604475475362e-05,
      "loss": 4.8354,
      "step": 74752
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.87676588072431e-05,
      "loss": 4.8393,
      "step": 75264
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.875927285973258e-05,
      "loss": 4.8436,
      "step": 75776
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.875088691222206e-05,
      "loss": 4.8395,
      "step": 76288
    },
    {
      "epoch": 0.03,
      "eval_loss": 4.797691345214844,
      "eval_runtime": 298.3239,
      "eval_samples_per_second": 1279.116,
      "eval_steps_per_second": 39.973,
      "step": 76320
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.874251734351528e-05,
      "loss": 4.8214,
      "step": 76800
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.873413139600475e-05,
      "loss": 4.8208,
      "step": 77312
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.872574544849423e-05,
      "loss": 4.8377,
      "step": 77824
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.871735950098371e-05,
      "loss": 4.8212,
      "step": 78336
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.870897355347319e-05,
      "loss": 4.8322,
      "step": 78848
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.870058760596267e-05,
      "loss": 4.8043,
      "step": 79360
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.869220165845215e-05,
      "loss": 4.8221,
      "step": 79872
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.868383209047421e-05,
      "loss": 4.8045,
      "step": 80384
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8675462521038576e-05,
      "loss": 4.7989,
      "step": 80896
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8667076573528056e-05,
      "loss": 4.8059,
      "step": 81408
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.865870700482127e-05,
      "loss": 4.8087,
      "step": 81920
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.865032105731075e-05,
      "loss": 4.8093,
      "step": 82432
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.864195148860396e-05,
      "loss": 4.7914,
      "step": 82944
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.863356554109344e-05,
      "loss": 4.7907,
      "step": 83456
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.862517959358292e-05,
      "loss": 4.783,
      "step": 83968
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.86167936460724e-05,
      "loss": 4.7841,
      "step": 84480
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8608407698561874e-05,
      "loss": 4.7822,
      "step": 84992
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8600021751051354e-05,
      "loss": 4.7827,
      "step": 85504
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8591635803540834e-05,
      "loss": 4.777,
      "step": 86016
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8583249856030314e-05,
      "loss": 4.7977,
      "step": 86528
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8574863908519794e-05,
      "loss": 4.7717,
      "step": 87040
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8566477961009274e-05,
      "loss": 4.7818,
      "step": 87552
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8558092013498754e-05,
      "loss": 4.7678,
      "step": 88064
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.854970606598824e-05,
      "loss": 4.7768,
      "step": 88576
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.854132011847772e-05,
      "loss": 4.7645,
      "step": 89088
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.853295054977093e-05,
      "loss": 4.7636,
      "step": 89600
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.852456460226041e-05,
      "loss": 4.7584,
      "step": 90112
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.851617865474989e-05,
      "loss": 4.7553,
      "step": 90624
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.850779270723937e-05,
      "loss": 4.7384,
      "step": 91136
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.849940675972885e-05,
      "loss": 4.754,
      "step": 91648
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.849102081221833e-05,
      "loss": 4.7511,
      "step": 92160
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.848263486470781e-05,
      "loss": 4.7543,
      "step": 92672
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.847424891719729e-05,
      "loss": 4.7446,
      "step": 93184
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.84658793484905e-05,
      "loss": 4.747,
      "step": 93696
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.845750977978371e-05,
      "loss": 4.7382,
      "step": 94208
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8449123832273194e-05,
      "loss": 4.7346,
      "step": 94720
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8440737884762674e-05,
      "loss": 4.7193,
      "step": 95232
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8432351937252154e-05,
      "loss": 4.7271,
      "step": 95744
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8423965989741634e-05,
      "loss": 4.7247,
      "step": 96256
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8415580042231114e-05,
      "loss": 4.7233,
      "step": 96768
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.840719409472059e-05,
      "loss": 4.7271,
      "step": 97280
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.839880814721007e-05,
      "loss": 4.7266,
      "step": 97792
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.839043857850328e-05,
      "loss": 4.7149,
      "step": 98304
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.838205263099276e-05,
      "loss": 4.7205,
      "step": 98816
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8373666683482236e-05,
      "loss": 4.7189,
      "step": 99328
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8365280735971716e-05,
      "loss": 4.7052,
      "step": 99840
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8356894788461196e-05,
      "loss": 4.7188,
      "step": 100352
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8348508840950676e-05,
      "loss": 4.706,
      "step": 100864
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.834013927224389e-05,
      "loss": 4.6933,
      "step": 101376
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.833175332473337e-05,
      "loss": 4.7115,
      "step": 101888
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.832336737722285e-05,
      "loss": 4.6952,
      "step": 102400
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.831498142971233e-05,
      "loss": 4.6934,
      "step": 102912
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.830659548220181e-05,
      "loss": 4.682,
      "step": 103424
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.829820953469129e-05,
      "loss": 4.6918,
      "step": 103936
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.82898399659845e-05,
      "loss": 4.6794,
      "step": 104448
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.828145401847398e-05,
      "loss": 4.7121,
      "step": 104960
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.827306807096346e-05,
      "loss": 4.6811,
      "step": 105472
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.826468212345294e-05,
      "loss": 4.6934,
      "step": 105984
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.825631255474615e-05,
      "loss": 4.6897,
      "step": 106496
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.824792660723563e-05,
      "loss": 4.6764,
      "step": 107008
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.823954065972511e-05,
      "loss": 4.6671,
      "step": 107520
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.82311547122146e-05,
      "loss": 4.6787,
      "step": 108032
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8222768764704077e-05,
      "loss": 4.6643,
      "step": 108544
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8214399195997286e-05,
      "loss": 4.6631,
      "step": 109056
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8206013248486766e-05,
      "loss": 4.6812,
      "step": 109568
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8197627300976246e-05,
      "loss": 4.6768,
      "step": 110080
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8189241353465726e-05,
      "loss": 4.6593,
      "step": 110592
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8180855405955206e-05,
      "loss": 4.654,
      "step": 111104
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8172469458444685e-05,
      "loss": 4.6533,
      "step": 111616
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8164083510934165e-05,
      "loss": 4.6649,
      "step": 112128
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8155697563423645e-05,
      "loss": 4.6636,
      "step": 112640
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8147311615913125e-05,
      "loss": 4.6621,
      "step": 113152
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8138942047206334e-05,
      "loss": 4.6618,
      "step": 113664
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8130556099695814e-05,
      "loss": 4.6632,
      "step": 114176
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8122170152185294e-05,
      "loss": 4.6612,
      "step": 114688
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8113784204674774e-05,
      "loss": 4.6458,
      "step": 115200
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.810541463596799e-05,
      "loss": 4.6493,
      "step": 115712
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.809702868845747e-05,
      "loss": 4.6463,
      "step": 116224
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.808864274094695e-05,
      "loss": 4.6329,
      "step": 116736
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.808025679343642e-05,
      "loss": 4.6478,
      "step": 117248
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.80718708459259e-05,
      "loss": 4.6465,
      "step": 117760
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.806348489841538e-05,
      "loss": 4.6491,
      "step": 118272
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.80551153297086e-05,
      "loss": 4.6288,
      "step": 118784
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.804672938219807e-05,
      "loss": 4.6202,
      "step": 119296
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.803835981349129e-05,
      "loss": 4.6377,
      "step": 119808
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.802997386598077e-05,
      "loss": 4.633,
      "step": 120320
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.802158791847025e-05,
      "loss": 4.638,
      "step": 120832
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.801320197095973e-05,
      "loss": 4.6308,
      "step": 121344
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.800481602344921e-05,
      "loss": 4.6325,
      "step": 121856
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7996446454742424e-05,
      "loss": 4.6266,
      "step": 122368
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.79880605072319e-05,
      "loss": 4.6188,
      "step": 122880
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.797967455972138e-05,
      "loss": 4.6232,
      "step": 123392
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.797128861221086e-05,
      "loss": 4.6226,
      "step": 123904
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.796290266470034e-05,
      "loss": 4.6279,
      "step": 124416
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.795451671718982e-05,
      "loss": 4.6292,
      "step": 124928
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.79461307696793e-05,
      "loss": 4.6255,
      "step": 125440
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7937761200972506e-05,
      "loss": 4.6116,
      "step": 125952
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7929375253461986e-05,
      "loss": 4.6107,
      "step": 126464
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.792098930595147e-05,
      "loss": 4.6093,
      "step": 126976
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.791260335844095e-05,
      "loss": 4.6193,
      "step": 127488
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.790421741093043e-05,
      "loss": 4.6058,
      "step": 128000
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.789583146341991e-05,
      "loss": 4.6046,
      "step": 128512
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.788744551590939e-05,
      "loss": 4.5983,
      "step": 129024
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.787905956839887e-05,
      "loss": 4.6122,
      "step": 129536
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.787067362088835e-05,
      "loss": 4.586,
      "step": 130048
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.786230405218156e-05,
      "loss": 4.5988,
      "step": 130560
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.785391810467104e-05,
      "loss": 4.6,
      "step": 131072
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.784553215716052e-05,
      "loss": 4.5937,
      "step": 131584
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.783714620965e-05,
      "loss": 4.5916,
      "step": 132096
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.782876026213948e-05,
      "loss": 4.602,
      "step": 132608
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.782037431462896e-05,
      "loss": 4.5867,
      "step": 133120
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.781200474592217e-05,
      "loss": 4.591,
      "step": 133632
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.780361879841165e-05,
      "loss": 4.5942,
      "step": 134144
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7795249229704866e-05,
      "loss": 4.5823,
      "step": 134656
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7786863282194346e-05,
      "loss": 4.5913,
      "step": 135168
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7778477334683826e-05,
      "loss": 4.5965,
      "step": 135680
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7770091387173306e-05,
      "loss": 4.5882,
      "step": 136192
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7761705439662786e-05,
      "loss": 4.5755,
      "step": 136704
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.775331949215226e-05,
      "loss": 4.5883,
      "step": 137216
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7744949923445475e-05,
      "loss": 4.5865,
      "step": 137728
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7736563975934955e-05,
      "loss": 4.5784,
      "step": 138240
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7728178028424435e-05,
      "loss": 4.5726,
      "step": 138752
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.771979208091391e-05,
      "loss": 4.576,
      "step": 139264
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.771140613340339e-05,
      "loss": 4.5879,
      "step": 139776
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7703020185892875e-05,
      "loss": 4.5755,
      "step": 140288
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7694634238382355e-05,
      "loss": 4.5716,
      "step": 140800
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7686248290871835e-05,
      "loss": 4.5619,
      "step": 141312
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7677862343361315e-05,
      "loss": 4.5685,
      "step": 141824
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7669476395850795e-05,
      "loss": 4.5636,
      "step": 142336
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7661106827144004e-05,
      "loss": 4.5792,
      "step": 142848
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7652720879633484e-05,
      "loss": 4.575,
      "step": 143360
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7644334932122964e-05,
      "loss": 4.5551,
      "step": 143872
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.763596536341617e-05,
      "loss": 4.5618,
      "step": 144384
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.762757941590565e-05,
      "loss": 4.5655,
      "step": 144896
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.761920984719886e-05,
      "loss": 4.564,
      "step": 145408
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.761082389968834e-05,
      "loss": 4.5707,
      "step": 145920
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.760243795217783e-05,
      "loss": 4.5566,
      "step": 146432
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.759405200466731e-05,
      "loss": 4.5598,
      "step": 146944
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.758566605715679e-05,
      "loss": 4.5671,
      "step": 147456
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.757728010964627e-05,
      "loss": 4.5533,
      "step": 147968
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.756889416213575e-05,
      "loss": 4.5545,
      "step": 148480
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.756050821462523e-05,
      "loss": 4.5596,
      "step": 148992
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.755213864591844e-05,
      "loss": 4.544,
      "step": 149504
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.754375269840792e-05,
      "loss": 4.5479,
      "step": 150016
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.75353667508974e-05,
      "loss": 4.5547,
      "step": 150528
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.752698080338688e-05,
      "loss": 4.5406,
      "step": 151040
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.751859485587636e-05,
      "loss": 4.5472,
      "step": 151552
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.751020890836584e-05,
      "loss": 4.5504,
      "step": 152064
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.750182296085532e-05,
      "loss": 4.5474,
      "step": 152576
    },
    {
      "epoch": 1.03,
      "eval_loss": 4.510871410369873,
      "eval_runtime": 296.5965,
      "eval_samples_per_second": 1286.566,
      "eval_steps_per_second": 40.206,
      "step": 152640
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.74934370133448e-05,
      "loss": 4.5325,
      "step": 153088
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.748505106583428e-05,
      "loss": 4.5329,
      "step": 153600
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.747666511832376e-05,
      "loss": 4.5536,
      "step": 154112
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.746827917081324e-05,
      "loss": 4.532,
      "step": 154624
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.745989322330272e-05,
      "loss": 4.5484,
      "step": 155136
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.74515072757922e-05,
      "loss": 4.5299,
      "step": 155648
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.744312132828168e-05,
      "loss": 4.5444,
      "step": 156160
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.743473538077116e-05,
      "loss": 4.5256,
      "step": 156672
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7426349433260637e-05,
      "loss": 4.5355,
      "step": 157184
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7417963485750117e-05,
      "loss": 4.5314,
      "step": 157696
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7409577538239597e-05,
      "loss": 4.5356,
      "step": 158208
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7401191590729076e-05,
      "loss": 4.5416,
      "step": 158720
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7392838400826015e-05,
      "loss": 4.5214,
      "step": 159232
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7384452453315495e-05,
      "loss": 4.5249,
      "step": 159744
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7376066505804975e-05,
      "loss": 4.5194,
      "step": 160256
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.736768055829446e-05,
      "loss": 4.5174,
      "step": 160768
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.735929461078394e-05,
      "loss": 4.5201,
      "step": 161280
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.735090866327342e-05,
      "loss": 4.5245,
      "step": 161792
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.73425227157629e-05,
      "loss": 4.5174,
      "step": 162304
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.733413676825238e-05,
      "loss": 4.5415,
      "step": 162816
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.732575082074186e-05,
      "loss": 4.5209,
      "step": 163328
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7317364873231334e-05,
      "loss": 4.5259,
      "step": 163840
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7308978925720814e-05,
      "loss": 4.5174,
      "step": 164352
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7300592978210294e-05,
      "loss": 4.5269,
      "step": 164864
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7292207030699774e-05,
      "loss": 4.5163,
      "step": 165376
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.728383746199298e-05,
      "loss": 4.5107,
      "step": 165888
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.727545151448246e-05,
      "loss": 4.5148,
      "step": 166400
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.726706556697194e-05,
      "loss": 4.5087,
      "step": 166912
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.725869599826516e-05,
      "loss": 4.4927,
      "step": 167424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.725031005075464e-05,
      "loss": 4.5096,
      "step": 167936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.724192410324412e-05,
      "loss": 4.515,
      "step": 168448
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.72335381557336e-05,
      "loss": 4.5109,
      "step": 168960
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.722515220822308e-05,
      "loss": 4.5076,
      "step": 169472
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.721676626071256e-05,
      "loss": 4.5082,
      "step": 169984
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.720838031320204e-05,
      "loss": 4.5017,
      "step": 170496
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.719999436569152e-05,
      "loss": 4.4965,
      "step": 171008
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7191608418181e-05,
      "loss": 4.4929,
      "step": 171520
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.718323884947421e-05,
      "loss": 4.4912,
      "step": 172032
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.717485290196369e-05,
      "loss": 4.4925,
      "step": 172544
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.716646695445317e-05,
      "loss": 4.493,
      "step": 173056
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.715808100694265e-05,
      "loss": 4.4996,
      "step": 173568
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.714969505943213e-05,
      "loss": 4.5,
      "step": 174080
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7141325490725343e-05,
      "loss": 4.4905,
      "step": 174592
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7132939543214823e-05,
      "loss": 4.4949,
      "step": 175104
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7124553595704303e-05,
      "loss": 4.4985,
      "step": 175616
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.711616764819378e-05,
      "loss": 4.4804,
      "step": 176128
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.710779807948699e-05,
      "loss": 4.4952,
      "step": 176640
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.709941213197647e-05,
      "loss": 4.4883,
      "step": 177152
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.709102618446595e-05,
      "loss": 4.4699,
      "step": 177664
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.708264023695543e-05,
      "loss": 4.4901,
      "step": 178176
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.707425428944491e-05,
      "loss": 4.4821,
      "step": 178688
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.706586834193439e-05,
      "loss": 4.4765,
      "step": 179200
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7057482394423865e-05,
      "loss": 4.4729,
      "step": 179712
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.704909644691335e-05,
      "loss": 4.4769,
      "step": 180224
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.704072687820657e-05,
      "loss": 4.4643,
      "step": 180736
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.703235730949978e-05,
      "loss": 4.4936,
      "step": 181248
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.702397136198926e-05,
      "loss": 4.4736,
      "step": 181760
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.701558541447874e-05,
      "loss": 4.4809,
      "step": 182272
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.700719946696822e-05,
      "loss": 4.4835,
      "step": 182784
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.69988135194577e-05,
      "loss": 4.4683,
      "step": 183296
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.699042757194717e-05,
      "loss": 4.46,
      "step": 183808
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6982058003240386e-05,
      "loss": 4.4751,
      "step": 184320
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6973672055729866e-05,
      "loss": 4.4614,
      "step": 184832
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.696528610821934e-05,
      "loss": 4.4566,
      "step": 185344
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.695690016070882e-05,
      "loss": 4.4767,
      "step": 185856
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6948514213198306e-05,
      "loss": 4.4742,
      "step": 186368
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6940128265687786e-05,
      "loss": 4.4579,
      "step": 186880
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6931742318177266e-05,
      "loss": 4.4532,
      "step": 187392
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6923372749470475e-05,
      "loss": 4.4559,
      "step": 187904
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6914986801959955e-05,
      "loss": 4.4631,
      "step": 188416
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6906600854449435e-05,
      "loss": 4.4666,
      "step": 188928
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6898214906938915e-05,
      "loss": 4.4658,
      "step": 189440
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6889828959428395e-05,
      "loss": 4.4672,
      "step": 189952
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6881443011917875e-05,
      "loss": 4.4654,
      "step": 190464
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6873057064407355e-05,
      "loss": 4.469,
      "step": 190976
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6864687495700564e-05,
      "loss": 4.4535,
      "step": 191488
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6856301548190044e-05,
      "loss": 4.4567,
      "step": 192000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.684793197948325e-05,
      "loss": 4.4591,
      "step": 192512
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.683954603197274e-05,
      "loss": 4.44,
      "step": 193024
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.683116008446222e-05,
      "loss": 4.4567,
      "step": 193536
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.68227741369517e-05,
      "loss": 4.4616,
      "step": 194048
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.681438818944118e-05,
      "loss": 4.4598,
      "step": 194560
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.680600224193066e-05,
      "loss": 4.4453,
      "step": 195072
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.679761629442014e-05,
      "loss": 4.4337,
      "step": 195584
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.678923034690962e-05,
      "loss": 4.4522,
      "step": 196096
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.67808443993991e-05,
      "loss": 4.4412,
      "step": 196608
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.677245845188858e-05,
      "loss": 4.4573,
      "step": 197120
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.676408888318179e-05,
      "loss": 4.4477,
      "step": 197632
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.675570293567127e-05,
      "loss": 4.4503,
      "step": 198144
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.674731698816075e-05,
      "loss": 4.4446,
      "step": 198656
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.673893104065023e-05,
      "loss": 4.4341,
      "step": 199168
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.673056147194344e-05,
      "loss": 4.4425,
      "step": 199680
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6722175524432924e-05,
      "loss": 4.4407,
      "step": 200192
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6713789576922404e-05,
      "loss": 4.4521,
      "step": 200704
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6705403629411884e-05,
      "loss": 4.4543,
      "step": 201216
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.669701768190136e-05,
      "loss": 4.4458,
      "step": 201728
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.668863173439084e-05,
      "loss": 4.4317,
      "step": 202240
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.668024578688032e-05,
      "loss": 4.4367,
      "step": 202752
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.66718598393698e-05,
      "loss": 4.4363,
      "step": 203264
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.666347389185928e-05,
      "loss": 4.4435,
      "step": 203776
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.665512070195622e-05,
      "loss": 4.432,
      "step": 204288
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.664675113324943e-05,
      "loss": 4.4337,
      "step": 204800
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.663836518573891e-05,
      "loss": 4.4241,
      "step": 205312
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.662997923822839e-05,
      "loss": 4.444,
      "step": 205824
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.662159329071788e-05,
      "loss": 4.4153,
      "step": 206336
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.661320734320736e-05,
      "loss": 4.4299,
      "step": 206848
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.660482139569683e-05,
      "loss": 4.4233,
      "step": 207360
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.659643544818631e-05,
      "loss": 4.428,
      "step": 207872
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.658804950067579e-05,
      "loss": 4.423,
      "step": 208384
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.657966355316527e-05,
      "loss": 4.4336,
      "step": 208896
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.657129398445848e-05,
      "loss": 4.4261,
      "step": 209408
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.656290803694796e-05,
      "loss": 4.4212,
      "step": 209920
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.655452208943744e-05,
      "loss": 4.4295,
      "step": 210432
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.654613614192692e-05,
      "loss": 4.4142,
      "step": 210944
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.65377501944164e-05,
      "loss": 4.4331,
      "step": 211456
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.652936424690588e-05,
      "loss": 4.4374,
      "step": 211968
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6520994678199095e-05,
      "loss": 4.4212,
      "step": 212480
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6512608730688575e-05,
      "loss": 4.4171,
      "step": 212992
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6504222783178055e-05,
      "loss": 4.4229,
      "step": 213504
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6495836835667535e-05,
      "loss": 4.4249,
      "step": 214016
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6487450888157015e-05,
      "loss": 4.417,
      "step": 214528
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6479064940646495e-05,
      "loss": 4.4145,
      "step": 215040
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6470678993135975e-05,
      "loss": 4.4125,
      "step": 215552
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6462293045625455e-05,
      "loss": 4.4302,
      "step": 216064
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6453923476918664e-05,
      "loss": 4.4198,
      "step": 216576
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6445537529408144e-05,
      "loss": 4.4142,
      "step": 217088
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6437151581897624e-05,
      "loss": 4.4031,
      "step": 217600
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.642878201319083e-05,
      "loss": 4.4142,
      "step": 218112
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.642039606568031e-05,
      "loss": 4.4049,
      "step": 218624
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.64120101181698e-05,
      "loss": 4.4239,
      "step": 219136
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.640362417065928e-05,
      "loss": 4.4196,
      "step": 219648
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.639523822314876e-05,
      "loss": 4.4054,
      "step": 220160
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.638685227563824e-05,
      "loss": 4.402,
      "step": 220672
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.637846632812772e-05,
      "loss": 4.4119,
      "step": 221184
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.637008038061719e-05,
      "loss": 4.4086,
      "step": 221696
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.636169443310667e-05,
      "loss": 4.4213,
      "step": 222208
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.635332486439989e-05,
      "loss": 4.4008,
      "step": 222720
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.634493891688936e-05,
      "loss": 4.4126,
      "step": 223232
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.633655296937884e-05,
      "loss": 4.41,
      "step": 223744
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.632816702186832e-05,
      "loss": 4.4075,
      "step": 224256
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.631979745316154e-05,
      "loss": 4.4074,
      "step": 224768
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.631141150565102e-05,
      "loss": 4.408,
      "step": 225280
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.63030255581405e-05,
      "loss": 4.3974,
      "step": 225792
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.629463961062998e-05,
      "loss": 4.4028,
      "step": 226304
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.628627004192319e-05,
      "loss": 4.4062,
      "step": 226816
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6277884094412666e-05,
      "loss": 4.3987,
      "step": 227328
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6269498146902146e-05,
      "loss": 4.3952,
      "step": 227840
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6261112199391626e-05,
      "loss": 4.4041,
      "step": 228352
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6252742630684835e-05,
      "loss": 4.4015,
      "step": 228864
    },
    {
      "epoch": 0.03,
      "eval_loss": 4.372279167175293,
      "eval_runtime": 293.5676,
      "eval_samples_per_second": 1299.84,
      "eval_steps_per_second": 40.621,
      "step": 228960
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.6244356683174315e-05,
      "loss": 4.3951,
      "step": 229376
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.6235970735663795e-05,
      "loss": 4.3849,
      "step": 229888
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.6227584788153275e-05,
      "loss": 4.4101,
      "step": 230400
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.6219215219446484e-05,
      "loss": 4.3895,
      "step": 230912
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.621082927193597e-05,
      "loss": 4.4094,
      "step": 231424
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.620244332442545e-05,
      "loss": 4.3834,
      "step": 231936
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.619405737691493e-05,
      "loss": 4.4034,
      "step": 232448
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.618567142940441e-05,
      "loss": 4.3876,
      "step": 232960
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.617728548189389e-05,
      "loss": 4.3915,
      "step": 233472
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.61689159131871e-05,
      "loss": 4.394,
      "step": 233984
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.616052996567658e-05,
      "loss": 4.3945,
      "step": 234496
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.615214401816606e-05,
      "loss": 4.4011,
      "step": 235008
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.614377444945927e-05,
      "loss": 4.3847,
      "step": 235520
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.613538850194875e-05,
      "loss": 4.3834,
      "step": 236032
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.612700255443823e-05,
      "loss": 4.3875,
      "step": 236544
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.611861660692771e-05,
      "loss": 4.3737,
      "step": 237056
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.611023065941719e-05,
      "loss": 4.3844,
      "step": 237568
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.610184471190667e-05,
      "loss": 4.3894,
      "step": 238080
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.6093458764396156e-05,
      "loss": 4.3802,
      "step": 238592
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.6085072816885636e-05,
      "loss": 4.4027,
      "step": 239104
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.6076686869375116e-05,
      "loss": 4.3894,
      "step": 239616
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.6068317300668325e-05,
      "loss": 4.395,
      "step": 240128
    },
    {
| "epoch": 1.0, | |
| "learning_rate": 4.6059931353157805e-05, | |
| "loss": 4.3826, | |
| "step": 240640 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6051545405647285e-05, | |
| "loss": 4.3887, | |
| "step": 241152 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6043159458136764e-05, | |
| "loss": 4.383, | |
| "step": 241664 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6034789889429974e-05, | |
| "loss": 4.3789, | |
| "step": 242176 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6026403941919454e-05, | |
| "loss": 4.3822, | |
| "step": 242688 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6018017994408933e-05, | |
| "loss": 4.3742, | |
| "step": 243200 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6009632046898413e-05, | |
| "loss": 4.369, | |
| "step": 243712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.600126247819162e-05, | |
| "loss": 4.3731, | |
| "step": 244224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.599287653068111e-05, | |
| "loss": 4.3829, | |
| "step": 244736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.598449058317059e-05, | |
| "loss": 4.3821, | |
| "step": 245248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.597610463566007e-05, | |
| "loss": 4.3789, | |
| "step": 245760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.596771868814955e-05, | |
| "loss": 4.3818, | |
| "step": 246272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.595934911944276e-05, | |
| "loss": 4.3658, | |
| "step": 246784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.595096317193224e-05, | |
| "loss": 4.3694, | |
| "step": 247296 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.594257722442172e-05, | |
| "loss": 4.3693, | |
| "step": 247808 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.59341912769112e-05, | |
| "loss": 4.3606, | |
| "step": 248320 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.592580532940067e-05, | |
| "loss": 4.3675, | |
| "step": 248832 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.591741938189015e-05, | |
| "loss": 4.3642, | |
| "step": 249344 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.590903343437963e-05, | |
| "loss": 4.3758, | |
| "step": 249856 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.590066386567285e-05, | |
| "loss": 4.3748, | |
| "step": 250368 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.589227791816233e-05, | |
| "loss": 4.3656, | |
| "step": 250880 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.588389197065181e-05, | |
| "loss": 4.3657, | |
| "step": 251392 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.587550602314129e-05, | |
| "loss": 4.3757, | |
| "step": 251904 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.586712007563077e-05, | |
| "loss": 4.3581, | |
| "step": 252416 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5858750506923976e-05, | |
| "loss": 4.3714, | |
| "step": 252928 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5850364559413456e-05, | |
| "loss": 4.3627, | |
| "step": 253440 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5841978611902936e-05, | |
| "loss": 4.347, | |
| "step": 253952 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5833609043196145e-05, | |
| "loss": 4.3708, | |
| "step": 254464 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5825223095685625e-05, | |
| "loss": 4.3583, | |
| "step": 254976 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5816837148175105e-05, | |
| "loss": 4.3583, | |
| "step": 255488 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5808451200664585e-05, | |
| "loss": 4.3486, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5800065253154065e-05, | |
| "loss": 4.3563, | |
| "step": 256512 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5791679305643545e-05, | |
| "loss": 4.3447, | |
| "step": 257024 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5783293358133025e-05, | |
| "loss": 4.3666, | |
| "step": 257536 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.577490741062251e-05, | |
| "loss": 4.356, | |
| "step": 258048 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.576652146311199e-05, | |
| "loss": 4.3594, | |
| "step": 258560 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.575813551560147e-05, | |
| "loss": 4.365, | |
| "step": 259072 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.574974956809095e-05, | |
| "loss": 4.3507, | |
| "step": 259584 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.574136362058043e-05, | |
| "loss": 4.3388, | |
| "step": 260096 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.573301043067737e-05, | |
| "loss": 4.3605, | |
| "step": 260608 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.572462448316685e-05, | |
| "loss": 4.3439, | |
| "step": 261120 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.571623853565633e-05, | |
| "loss": 4.3409, | |
| "step": 261632 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.570785258814581e-05, | |
| "loss": 4.3588, | |
| "step": 262144 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.569946664063529e-05, | |
| "loss": 4.3559, | |
| "step": 262656 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.569108069312477e-05, | |
| "loss": 4.3434, | |
| "step": 263168 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.568269474561425e-05, | |
| "loss": 4.3416, | |
| "step": 263680 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5674325176907465e-05, | |
| "loss": 4.334, | |
| "step": 264192 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5665939229396945e-05, | |
| "loss": 4.3451, | |
| "step": 264704 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5657553281886425e-05, | |
| "loss": 4.3529, | |
| "step": 265216 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5649167334375905e-05, | |
| "loss": 4.3534, | |
| "step": 265728 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5640781386865385e-05, | |
| "loss": 4.3523, | |
| "step": 266240 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.563239543935486e-05, | |
| "loss": 4.3473, | |
| "step": 266752 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.562400949184434e-05, | |
| "loss": 4.3611, | |
| "step": 267264 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5615639923137554e-05, | |
| "loss": 4.3373, | |
| "step": 267776 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5607253975627034e-05, | |
| "loss": 4.3471, | |
| "step": 268288 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.559886802811651e-05, | |
| "loss": 4.3443, | |
| "step": 268800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.559048208060599e-05, | |
| "loss": 4.3265, | |
| "step": 269312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.558209613309547e-05, | |
| "loss": 4.3505, | |
| "step": 269824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.557372656438868e-05, | |
| "loss": 4.3485, | |
| "step": 270336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.556534061687816e-05, | |
| "loss": 4.3444, | |
| "step": 270848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.555695466936764e-05, | |
| "loss": 4.3363, | |
| "step": 271360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.554856872185712e-05, | |
| "loss": 4.3276, | |
| "step": 271872 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.55401827743466e-05, | |
| "loss": 4.3404, | |
| "step": 272384 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.553179682683608e-05, | |
| "loss": 4.3376, | |
| "step": 272896 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.552341087932556e-05, | |
| "loss": 4.3449, | |
| "step": 273408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.551504131061877e-05, | |
| "loss": 4.3348, | |
| "step": 273920 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.550667174191198e-05, | |
| "loss": 4.345, | |
| "step": 274432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.549828579440146e-05, | |
| "loss": 4.332, | |
| "step": 274944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.548989984689094e-05, | |
| "loss": 4.3325, | |
| "step": 275456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.548151389938042e-05, | |
| "loss": 4.3317, | |
| "step": 275968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.54731279518699e-05, | |
| "loss": 4.329, | |
| "step": 276480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.546474200435939e-05, | |
| "loss": 4.3477, | |
| "step": 276992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.545635605684887e-05, | |
| "loss": 4.3438, | |
| "step": 277504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.544797010933835e-05, | |
| "loss": 4.3448, | |
| "step": 278016 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.543958416182783e-05, | |
| "loss": 4.3238, | |
| "step": 278528 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5431214593121036e-05, | |
| "loss": 4.3315, | |
| "step": 279040 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5422828645610516e-05, | |
| "loss": 4.3325, | |
| "step": 279552 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5414442698099996e-05, | |
| "loss": 4.3359, | |
| "step": 280064 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5406056750589476e-05, | |
| "loss": 4.3293, | |
| "step": 280576 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5397670803078956e-05, | |
| "loss": 4.3263, | |
| "step": 281088 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5389284855568436e-05, | |
| "loss": 4.3188, | |
| "step": 281600 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5380898908057916e-05, | |
| "loss": 4.3418, | |
| "step": 282112 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5372512960547396e-05, | |
| "loss": 4.3121, | |
| "step": 282624 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5364143391840605e-05, | |
| "loss": 4.3247, | |
| "step": 283136 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.535577382313382e-05, | |
| "loss": 4.3214, | |
| "step": 283648 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.53473878756233e-05, | |
| "loss": 4.3244, | |
| "step": 284160 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.533900192811278e-05, | |
| "loss": 4.32, | |
| "step": 284672 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.533061598060226e-05, | |
| "loss": 4.3331, | |
| "step": 285184 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.532223003309174e-05, | |
| "loss": 4.3222, | |
| "step": 285696 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.531384408558122e-05, | |
| "loss": 4.3153, | |
| "step": 286208 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.530547451687443e-05, | |
| "loss": 4.3321, | |
| "step": 286720 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.529710494816764e-05, | |
| "loss": 4.3146, | |
| "step": 287232 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.528871900065712e-05, | |
| "loss": 4.3291, | |
| "step": 287744 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.52803330531466e-05, | |
| "loss": 4.3373, | |
| "step": 288256 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.527194710563608e-05, | |
| "loss": 4.3241, | |
| "step": 288768 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.526356115812556e-05, | |
| "loss": 4.3181, | |
| "step": 289280 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.525517521061504e-05, | |
| "loss": 4.3174, | |
| "step": 289792 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.524678926310452e-05, | |
| "loss": 4.3223, | |
| "step": 290304 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5238403315594e-05, | |
| "loss": 4.3191, | |
| "step": 290816 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.523001736808348e-05, | |
| "loss": 4.3211, | |
| "step": 291328 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5221647799376694e-05, | |
| "loss": 4.3098, | |
| "step": 291840 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.521326185186617e-05, | |
| "loss": 4.3309, | |
| "step": 292352 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.520487590435565e-05, | |
| "loss": 4.323, | |
| "step": 292864 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.519648995684513e-05, | |
| "loss": 4.3183, | |
| "step": 293376 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.518810400933461e-05, | |
| "loss": 4.3023, | |
| "step": 293888 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.517971806182409e-05, | |
| "loss": 4.3164, | |
| "step": 294400 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5171348493117297e-05, | |
| "loss": 4.3096, | |
| "step": 294912 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5162962545606777e-05, | |
| "loss": 4.3218, | |
| "step": 295424 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5154576598096256e-05, | |
| "loss": 4.3264, | |
| "step": 295936 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.514619065058574e-05, | |
| "loss": 4.3146, | |
| "step": 296448 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.513780470307522e-05, | |
| "loss": 4.2995, | |
| "step": 296960 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512943513436843e-05, | |
| "loss": 4.3173, | |
| "step": 297472 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512104918685791e-05, | |
| "loss": 4.3115, | |
| "step": 297984 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.511266323934739e-05, | |
| "loss": 4.3268, | |
| "step": 298496 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.510427729183687e-05, | |
| "loss": 4.3028, | |
| "step": 299008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.509589134432635e-05, | |
| "loss": 4.316, | |
| "step": 299520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.508750539681583e-05, | |
| "loss": 4.3165, | |
| "step": 300032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.507911944930531e-05, | |
| "loss": 4.3165, | |
| "step": 300544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.507073350179479e-05, | |
| "loss": 4.3113, | |
| "step": 301056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5062363933088e-05, | |
| "loss": 4.3134, | |
| "step": 301568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.505397798557748e-05, | |
| "loss": 4.3031, | |
| "step": 302080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.50456084168707e-05, | |
| "loss": 4.3114, | |
| "step": 302592 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.503722246936018e-05, | |
| "loss": 4.3107, | |
| "step": 303104 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.502883652184966e-05, | |
| "loss": 4.3082, | |
| "step": 303616 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.502045057433914e-05, | |
| "loss": 4.3, | |
| "step": 304128 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.501206462682862e-05, | |
| "loss": 4.3134, | |
| "step": 304640 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.50036786793181e-05, | |
| "loss": 4.3064, | |
| "step": 305152 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 4.286992073059082, | |
| "eval_runtime": 294.0694, | |
| "eval_samples_per_second": 1297.622, | |
| "eval_steps_per_second": 40.552, | |
| "step": 305280 | |
| } | |
| ], | |
| "logging_steps": 512, | |
| "max_steps": 3052726, | |
| "num_train_epochs": 9223372036854775807, | |
| "save_steps": 10, | |
| "total_flos": 1.2543876023734512e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |