| { | |
| "best_metric": 4.129226207733154, | |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/existential-there-quantifier/lstm/0/checkpoints/checkpoint-610560", | |
| "epoch": 1.0250006060157382, | |
| "eval_steps": 10, | |
| "global_step": 610560, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.999998362119627e-05, | |
| "loss": 10.8199, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.999161405248948e-05, | |
| "loss": 7.5523, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.998322810497896e-05, | |
| "loss": 7.0685, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.997484215746844e-05, | |
| "loss": 7.0067, | |
| "step": 1536 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.996645620995792e-05, | |
| "loss": 6.957, | |
| "step": 2048 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.99580702624474e-05, | |
| "loss": 6.9311, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.994968431493688e-05, | |
| "loss": 6.7662, | |
| "step": 3072 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.994129836742636e-05, | |
| "loss": 6.6691, | |
| "step": 3584 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.993291241991584e-05, | |
| "loss": 6.5638, | |
| "step": 4096 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.992452647240532e-05, | |
| "loss": 6.4756, | |
| "step": 4608 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.99161405248948e-05, | |
| "loss": 6.4027, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.990775457738428e-05, | |
| "loss": 6.333, | |
| "step": 5632 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.989936862987376e-05, | |
| "loss": 6.259, | |
| "step": 6144 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.989098268236324e-05, | |
| "loss": 6.1928, | |
| "step": 6656 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.988259673485272e-05, | |
| "loss": 6.1457, | |
| "step": 7168 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.98742107873422e-05, | |
| "loss": 6.0947, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.986582483983168e-05, | |
| "loss": 6.0521, | |
| "step": 8192 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.985743889232116e-05, | |
| "loss": 6.0143, | |
| "step": 8704 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.984905294481064e-05, | |
| "loss": 5.9792, | |
| "step": 9216 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.984068337610385e-05, | |
| "loss": 5.9405, | |
| "step": 9728 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.983229742859333e-05, | |
| "loss": 5.9079, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.982391148108281e-05, | |
| "loss": 5.8683, | |
| "step": 10752 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.981552553357229e-05, | |
| "loss": 5.8467, | |
| "step": 11264 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9807155964865506e-05, | |
| "loss": 5.805, | |
| "step": 11776 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9798770017354986e-05, | |
| "loss": 5.786, | |
| "step": 12288 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9790384069844466e-05, | |
| "loss": 5.7539, | |
| "step": 12800 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9781998122333946e-05, | |
| "loss": 5.7311, | |
| "step": 13312 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9773612174823426e-05, | |
| "loss": 5.7097, | |
| "step": 13824 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.97652262273129e-05, | |
| "loss": 5.6784, | |
| "step": 14336 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9756856658606115e-05, | |
| "loss": 5.6532, | |
| "step": 14848 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9748470711095595e-05, | |
| "loss": 5.6408, | |
| "step": 15360 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.974008476358507e-05, | |
| "loss": 5.6287, | |
| "step": 15872 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.973169881607455e-05, | |
| "loss": 5.607, | |
| "step": 16384 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9723329247367764e-05, | |
| "loss": 5.5839, | |
| "step": 16896 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9714943299857244e-05, | |
| "loss": 5.5777, | |
| "step": 17408 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9706557352346724e-05, | |
| "loss": 5.55, | |
| "step": 17920 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9698171404836204e-05, | |
| "loss": 5.5363, | |
| "step": 18432 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.968980183612942e-05, | |
| "loss": 5.5033, | |
| "step": 18944 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.96814158886189e-05, | |
| "loss": 5.5068, | |
| "step": 19456 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.967302994110837e-05, | |
| "loss": 5.4845, | |
| "step": 19968 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.966464399359785e-05, | |
| "loss": 5.4754, | |
| "step": 20480 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.965625804608733e-05, | |
| "loss": 5.463, | |
| "step": 20992 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.964788847738054e-05, | |
| "loss": 5.4566, | |
| "step": 21504 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.963950252987002e-05, | |
| "loss": 5.4261, | |
| "step": 22016 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.96311165823595e-05, | |
| "loss": 5.4201, | |
| "step": 22528 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.962273063484898e-05, | |
| "loss": 5.4147, | |
| "step": 23040 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.961434468733847e-05, | |
| "loss": 5.3988, | |
| "step": 23552 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.960597511863168e-05, | |
| "loss": 5.4009, | |
| "step": 24064 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.959758917112116e-05, | |
| "loss": 5.3788, | |
| "step": 24576 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.958920322361064e-05, | |
| "loss": 5.3582, | |
| "step": 25088 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.958081727610012e-05, | |
| "loss": 5.3658, | |
| "step": 25600 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.957244770739333e-05, | |
| "loss": 5.345, | |
| "step": 26112 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.956406175988281e-05, | |
| "loss": 5.3389, | |
| "step": 26624 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.955567581237229e-05, | |
| "loss": 5.3113, | |
| "step": 27136 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.954728986486177e-05, | |
| "loss": 5.322, | |
| "step": 27648 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9538920296154976e-05, | |
| "loss": 5.2986, | |
| "step": 28160 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9530534348644456e-05, | |
| "loss": 5.3266, | |
| "step": 28672 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9522148401133936e-05, | |
| "loss": 5.2873, | |
| "step": 29184 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.951376245362342e-05, | |
| "loss": 5.2876, | |
| "step": 29696 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.95053765061129e-05, | |
| "loss": 5.2791, | |
| "step": 30208 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.949699055860238e-05, | |
| "loss": 5.2604, | |
| "step": 30720 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.948860461109186e-05, | |
| "loss": 5.2471, | |
| "step": 31232 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.948021866358134e-05, | |
| "loss": 5.2476, | |
| "step": 31744 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.947183271607082e-05, | |
| "loss": 5.23, | |
| "step": 32256 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.94634467685603e-05, | |
| "loss": 5.2241, | |
| "step": 32768 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.945506082104978e-05, | |
| "loss": 5.2412, | |
| "step": 33280 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.944670763114672e-05, | |
| "loss": 5.2212, | |
| "step": 33792 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.94383216836362e-05, | |
| "loss": 5.2078, | |
| "step": 34304 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.942993573612568e-05, | |
| "loss": 5.1838, | |
| "step": 34816 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.942154978861516e-05, | |
| "loss": 5.1809, | |
| "step": 35328 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.941316384110464e-05, | |
| "loss": 5.1896, | |
| "step": 35840 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9404794272397856e-05, | |
| "loss": 5.1862, | |
| "step": 36352 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9396408324887336e-05, | |
| "loss": 5.1772, | |
| "step": 36864 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9388022377376816e-05, | |
| "loss": 5.1737, | |
| "step": 37376 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9379636429866296e-05, | |
| "loss": 5.1672, | |
| "step": 37888 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9371266861159505e-05, | |
| "loss": 5.1589, | |
| "step": 38400 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9362880913648985e-05, | |
| "loss": 5.1456, | |
| "step": 38912 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9354494966138465e-05, | |
| "loss": 5.1354, | |
| "step": 39424 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9346109018627945e-05, | |
| "loss": 5.1299, | |
| "step": 39936 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9337739449921154e-05, | |
| "loss": 5.117, | |
| "step": 40448 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9329353502410634e-05, | |
| "loss": 5.1248, | |
| "step": 40960 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9320967554900114e-05, | |
| "loss": 5.1149, | |
| "step": 41472 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9312581607389594e-05, | |
| "loss": 5.1179, | |
| "step": 41984 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9304195659879074e-05, | |
| "loss": 5.0993, | |
| "step": 42496 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9295809712368554e-05, | |
| "loss": 5.0797, | |
| "step": 43008 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9287423764858034e-05, | |
| "loss": 5.0947, | |
| "step": 43520 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9279037817347514e-05, | |
| "loss": 5.0898, | |
| "step": 44032 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.927066824864073e-05, | |
| "loss": 5.0872, | |
| "step": 44544 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.92622823011302e-05, | |
| "loss": 5.072, | |
| "step": 45056 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.925389635361968e-05, | |
| "loss": 5.0747, | |
| "step": 45568 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.924551040610916e-05, | |
| "loss": 5.0602, | |
| "step": 46080 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.923712445859864e-05, | |
| "loss": 5.0545, | |
| "step": 46592 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.922875488989185e-05, | |
| "loss": 5.0548, | |
| "step": 47104 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.922036894238133e-05, | |
| "loss": 5.0489, | |
| "step": 47616 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.921198299487081e-05, | |
| "loss": 5.0504, | |
| "step": 48128 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.920359704736029e-05, | |
| "loss": 5.0452, | |
| "step": 48640 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.919521109984978e-05, | |
| "loss": 5.0417, | |
| "step": 49152 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.918684153114299e-05, | |
| "loss": 5.0199, | |
| "step": 49664 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.917845558363247e-05, | |
| "loss": 5.017, | |
| "step": 50176 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.917006963612195e-05, | |
| "loss": 5.0148, | |
| "step": 50688 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.916168368861143e-05, | |
| "loss": 5.0139, | |
| "step": 51200 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9153314119904636e-05, | |
| "loss": 5.007, | |
| "step": 51712 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9144928172394116e-05, | |
| "loss": 4.9996, | |
| "step": 52224 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9136558603687325e-05, | |
| "loss": 4.9908, | |
| "step": 52736 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9128172656176805e-05, | |
| "loss": 4.9981, | |
| "step": 53248 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9119786708666285e-05, | |
| "loss": 4.9721, | |
| "step": 53760 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9111400761155765e-05, | |
| "loss": 4.9789, | |
| "step": 54272 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9103014813645245e-05, | |
| "loss": 4.9768, | |
| "step": 54784 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.909462886613473e-05, | |
| "loss": 4.9675, | |
| "step": 55296 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.908624291862421e-05, | |
| "loss": 4.9646, | |
| "step": 55808 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.907785697111369e-05, | |
| "loss": 4.9671, | |
| "step": 56320 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.90694874024069e-05, | |
| "loss": 4.9546, | |
| "step": 56832 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.906110145489638e-05, | |
| "loss": 4.9572, | |
| "step": 57344 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.905271550738586e-05, | |
| "loss": 4.9493, | |
| "step": 57856 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.904432955987534e-05, | |
| "loss": 4.9401, | |
| "step": 58368 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.903594361236482e-05, | |
| "loss": 4.9448, | |
| "step": 58880 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.90275576648543e-05, | |
| "loss": 4.9431, | |
| "step": 59392 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.901918809614751e-05, | |
| "loss": 4.9365, | |
| "step": 59904 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.901080214863699e-05, | |
| "loss": 4.9236, | |
| "step": 60416 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.900241620112647e-05, | |
| "loss": 4.9297, | |
| "step": 60928 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.899403025361595e-05, | |
| "loss": 4.9301, | |
| "step": 61440 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.898564430610543e-05, | |
| "loss": 4.9193, | |
| "step": 61952 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.897725835859491e-05, | |
| "loss": 4.9062, | |
| "step": 62464 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8968872411084396e-05, | |
| "loss": 4.9138, | |
| "step": 62976 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8960502842377605e-05, | |
| "loss": 4.9149, | |
| "step": 63488 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8952133273670815e-05, | |
| "loss": 4.9012, | |
| "step": 64000 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8943747326160294e-05, | |
| "loss": 4.8942, | |
| "step": 64512 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8935361378649774e-05, | |
| "loss": 4.8847, | |
| "step": 65024 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8926975431139254e-05, | |
| "loss": 4.8918, | |
| "step": 65536 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8918589483628734e-05, | |
| "loss": 4.8822, | |
| "step": 66048 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8910203536118214e-05, | |
| "loss": 4.896, | |
| "step": 66560 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.890181758860769e-05, | |
| "loss": 4.8859, | |
| "step": 67072 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.889343164109717e-05, | |
| "loss": 4.8682, | |
| "step": 67584 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.888504569358665e-05, | |
| "loss": 4.8752, | |
| "step": 68096 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.887667612487986e-05, | |
| "loss": 4.8732, | |
| "step": 68608 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.886829017736934e-05, | |
| "loss": 4.8685, | |
| "step": 69120 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.885990422985882e-05, | |
| "loss": 4.8714, | |
| "step": 69632 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.88515182823483e-05, | |
| "loss": 4.8578, | |
| "step": 70144 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.884314871364151e-05, | |
| "loss": 4.8623, | |
| "step": 70656 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.883476276613099e-05, | |
| "loss": 4.8669, | |
| "step": 71168 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.882639319742421e-05, | |
| "loss": 4.851, | |
| "step": 71680 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.881800724991369e-05, | |
| "loss": 4.8473, | |
| "step": 72192 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.880962130240316e-05, | |
| "loss": 4.8484, | |
| "step": 72704 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.880123535489264e-05, | |
| "loss": 4.8337, | |
| "step": 73216 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.879284940738212e-05, | |
| "loss": 4.834, | |
| "step": 73728 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.87844634598716e-05, | |
| "loss": 4.8386, | |
| "step": 74240 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.877607751236109e-05, | |
| "loss": 4.8225, | |
| "step": 74752 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.876769156485057e-05, | |
| "loss": 4.8276, | |
| "step": 75264 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.875930561734005e-05, | |
| "loss": 4.8316, | |
| "step": 75776 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.875093604863326e-05, | |
| "loss": 4.8273, | |
| "step": 76288 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 4.783571720123291, | |
| "eval_runtime": 294.9308, | |
| "eval_samples_per_second": 1293.833, | |
| "eval_steps_per_second": 40.433, | |
| "step": 76320 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.874255010112274e-05, | |
| "loss": 4.8105, | |
| "step": 76800 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.873416415361222e-05, | |
| "loss": 4.8092, | |
| "step": 77312 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.87257782061017e-05, | |
| "loss": 4.8259, | |
| "step": 77824 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.871739225859118e-05, | |
| "loss": 4.8087, | |
| "step": 78336 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8709006311080657e-05, | |
| "loss": 4.821, | |
| "step": 78848 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8700620363570137e-05, | |
| "loss": 4.7924, | |
| "step": 79360 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8692234416059616e-05, | |
| "loss": 4.811, | |
| "step": 79872 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8683864847352826e-05, | |
| "loss": 4.7928, | |
| "step": 80384 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8675478899842306e-05, | |
| "loss": 4.7893, | |
| "step": 80896 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8667092952331785e-05, | |
| "loss": 4.794, | |
| "step": 81408 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8658723383625e-05, | |
| "loss": 4.7974, | |
| "step": 81920 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.865033743611448e-05, | |
| "loss": 4.7972, | |
| "step": 82432 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.864196786740769e-05, | |
| "loss": 4.7793, | |
| "step": 82944 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.863358191989717e-05, | |
| "loss": 4.779, | |
| "step": 83456 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.862519597238665e-05, | |
| "loss": 4.7706, | |
| "step": 83968 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.861681002487613e-05, | |
| "loss": 4.7726, | |
| "step": 84480 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.860842407736561e-05, | |
| "loss": 4.7693, | |
| "step": 84992 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.860003812985509e-05, | |
| "loss": 4.7697, | |
| "step": 85504 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.859165218234457e-05, | |
| "loss": 4.7662, | |
| "step": 86016 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.858326623483405e-05, | |
| "loss": 4.786, | |
| "step": 86528 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.857488028732352e-05, | |
| "loss": 4.7612, | |
| "step": 87040 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.856649433981301e-05, | |
| "loss": 4.7702, | |
| "step": 87552 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.855810839230249e-05, | |
| "loss": 4.7566, | |
| "step": 88064 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.85497388235957e-05, | |
| "loss": 4.766, | |
| "step": 88576 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8541369254888915e-05, | |
| "loss": 4.7548, | |
| "step": 89088 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8532983307378395e-05, | |
| "loss": 4.7521, | |
| "step": 89600 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8524597359867875e-05, | |
| "loss": 4.7483, | |
| "step": 90112 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.851621141235735e-05, | |
| "loss": 4.7439, | |
| "step": 90624 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.850782546484683e-05, | |
| "loss": 4.7285, | |
| "step": 91136 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.849943951733631e-05, | |
| "loss": 4.7434, | |
| "step": 91648 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8491069948629524e-05, | |
| "loss": 4.7404, | |
| "step": 92160 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8482684001119e-05, | |
| "loss": 4.7427, | |
| "step": 92672 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.847429805360848e-05, | |
| "loss": 4.7337, | |
| "step": 93184 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8465912106097964e-05, | |
| "loss": 4.7371, | |
| "step": 93696 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.845754253739117e-05, | |
| "loss": 4.7279, | |
| "step": 94208 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.844915658988065e-05, | |
| "loss": 4.7245, | |
| "step": 94720 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.844077064237013e-05, | |
| "loss": 4.7085, | |
| "step": 95232 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.843238469485961e-05, | |
| "loss": 4.7155, | |
| "step": 95744 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.842399874734909e-05, | |
| "loss": 4.7158, | |
| "step": 96256 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.841561279983857e-05, | |
| "loss": 4.7129, | |
| "step": 96768 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.840722685232805e-05, | |
| "loss": 4.7169, | |
| "step": 97280 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.839884090481753e-05, | |
| "loss": 4.7157, | |
| "step": 97792 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.839047133611074e-05, | |
| "loss": 4.7038, | |
| "step": 98304 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.838208538860022e-05, | |
| "loss": 4.7091, | |
| "step": 98816 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.837371581989343e-05, | |
| "loss": 4.7088, | |
| "step": 99328 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.836532987238292e-05, | |
| "loss": 4.6963, | |
| "step": 99840 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.83569439248724e-05, | |
| "loss": 4.7082, | |
| "step": 100352 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.834855797736188e-05, | |
| "loss": 4.6943, | |
| "step": 100864 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.834017202985136e-05, | |
| "loss": 4.6808, | |
| "step": 101376 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8331802461144566e-05, | |
| "loss": 4.7015, | |
| "step": 101888 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8323416513634046e-05, | |
| "loss": 4.6847, | |
| "step": 102400 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8315030566123526e-05, | |
| "loss": 4.6833, | |
| "step": 102912 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8306644618613006e-05, | |
| "loss": 4.6728, | |
| "step": 103424 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8298258671102486e-05, | |
| "loss": 4.6824, | |
| "step": 103936 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8289872723591966e-05, | |
| "loss": 4.6699, | |
| "step": 104448 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8281486776081446e-05, | |
| "loss": 4.7019, | |
| "step": 104960 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8273117207374655e-05, | |
| "loss": 4.6703, | |
| "step": 105472 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.826474763866787e-05, | |
| "loss": 4.6824, | |
| "step": 105984 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.825636169115735e-05, | |
| "loss": 4.68, | |
| "step": 106496 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.824797574364683e-05, | |
| "loss": 4.6659, | |
| "step": 107008 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.823958979613631e-05, | |
| "loss": 4.6571, | |
| "step": 107520 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.823120384862579e-05, | |
| "loss": 4.6682, | |
| "step": 108032 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.822281790111527e-05, | |
| "loss": 4.6531, | |
| "step": 108544 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.821444833240848e-05, | |
| "loss": 4.6538, | |
| "step": 109056 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.820606238489796e-05, | |
| "loss": 4.6699, | |
| "step": 109568 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.819767643738744e-05, | |
| "loss": 4.6656, | |
| "step": 110080 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.818930686868065e-05, | |
| "loss": 4.6496, | |
| "step": 110592 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.818092092117013e-05, | |
| "loss": 4.6448, | |
| "step": 111104 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.817253497365961e-05, | |
| "loss": 4.6437, | |
| "step": 111616 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.816414902614909e-05, | |
| "loss": 4.6558, | |
| "step": 112128 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.815576307863857e-05, | |
| "loss": 4.6542, | |
| "step": 112640 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.814737713112805e-05, | |
| "loss": 4.6526, | |
| "step": 113152 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8138991183617535e-05, | |
| "loss": 4.6522, | |
| "step": 113664 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.813060523610701e-05, | |
| "loss": 4.653, | |
| "step": 114176 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.812221928859649e-05, | |
| "loss": 4.6528, | |
| "step": 114688 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.811383334108597e-05, | |
| "loss": 4.6353, | |
| "step": 115200 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.810544739357545e-05, | |
| "loss": 4.6398, | |
| "step": 115712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.809706144606493e-05, | |
| "loss": 4.6367, | |
| "step": 116224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8088708256161873e-05, | |
| "loss": 4.6236, | |
| "step": 116736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.808032230865135e-05, | |
| "loss": 4.6387, | |
| "step": 117248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8071936361140827e-05, | |
| "loss": 4.6355, | |
| "step": 117760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8063550413630307e-05, | |
| "loss": 4.638, | |
| "step": 118272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8055164466119786e-05, | |
| "loss": 4.6201, | |
| "step": 118784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.804677851860927e-05, | |
| "loss": 4.6107, | |
| "step": 119296 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.803840894990248e-05, | |
| "loss": 4.6285, | |
| "step": 119808 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.803002300239196e-05, | |
| "loss": 4.6237, | |
| "step": 120320 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.802163705488144e-05, | |
| "loss": 4.6287, | |
| "step": 120832 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.801325110737092e-05, | |
| "loss": 4.6203, | |
| "step": 121344 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.800488153866413e-05, | |
| "loss": 4.6229, | |
| "step": 121856 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.799649559115361e-05, | |
| "loss": 4.6174, | |
| "step": 122368 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.798810964364309e-05, | |
| "loss": 4.6088, | |
| "step": 122880 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.797972369613257e-05, | |
| "loss": 4.6165, | |
| "step": 123392 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.797135412742578e-05, | |
| "loss": 4.612, | |
| "step": 123904 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.796296817991526e-05, | |
| "loss": 4.6197, | |
| "step": 124416 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.795458223240474e-05, | |
| "loss": 4.6211, | |
| "step": 124928 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.794619628489423e-05, | |
| "loss": 4.6148, | |
| "step": 125440 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.793781033738371e-05, | |
| "loss": 4.6019, | |
| "step": 125952 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7929440768676916e-05, | |
| "loss": 4.6022, | |
| "step": 126464 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7921054821166396e-05, | |
| "loss": 4.601, | |
| "step": 126976 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7912668873655876e-05, | |
| "loss": 4.6093, | |
| "step": 127488 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7904282926145356e-05, | |
| "loss": 4.5961, | |
| "step": 128000 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7895896978634836e-05, | |
| "loss": 4.5965, | |
| "step": 128512 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7887527409928045e-05, | |
| "loss": 4.5899, | |
| "step": 129024 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7879141462417525e-05, | |
| "loss": 4.6032, | |
| "step": 129536 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7870755514907005e-05, | |
| "loss": 4.5757, | |
| "step": 130048 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7862369567396485e-05, | |
| "loss": 4.5887, | |
| "step": 130560 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7853983619885965e-05, | |
| "loss": 4.5922, | |
| "step": 131072 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7845597672375445e-05, | |
| "loss": 4.5838, | |
| "step": 131584 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7837211724864925e-05, | |
| "loss": 4.5821, | |
| "step": 132096 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.782884215615814e-05, | |
| "loss": 4.5925, | |
| "step": 132608 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.782045620864762e-05, | |
| "loss": 4.5793, | |
| "step": 133120 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.78120702611371e-05, | |
| "loss": 4.5808, | |
| "step": 133632 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.780368431362658e-05, | |
| "loss": 4.5851, | |
| "step": 134144 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.779529836611606e-05, | |
| "loss": 4.5729, | |
| "step": 134656 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.778691241860554e-05, | |
| "loss": 4.5832, | |
| "step": 135168 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.777852647109502e-05, | |
| "loss": 4.5878, | |
| "step": 135680 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.777015690238823e-05, | |
| "loss": 4.5787, | |
| "step": 136192 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.776177095487771e-05, | |
| "loss": 4.567, | |
| "step": 136704 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.775338500736719e-05, | |
| "loss": 4.5788, | |
| "step": 137216 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.774499905985666e-05, | |
| "loss": 4.5771, | |
| "step": 137728 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.773661311234615e-05, | |
| "loss": 4.5696, | |
| "step": 138240 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.772822716483563e-05, | |
| "loss": 4.5643, | |
| "step": 138752 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.771984121732511e-05, | |
| "loss": 4.5659, | |
| "step": 139264 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.771147164861832e-05, | |
| "loss": 4.5781, | |
| "step": 139776 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.77030857011078e-05, | |
| "loss": 4.5668, | |
| "step": 140288 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.769469975359728e-05, | |
| "loss": 4.5633, | |
| "step": 140800 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.768631380608676e-05, | |
| "loss": 4.5522, | |
| "step": 141312 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.767794423737997e-05, | |
| "loss": 4.559, | |
| "step": 141824 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.766955828986945e-05, | |
| "loss": 4.5534, | |
| "step": 142336 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.766117234235893e-05, | |
| "loss": 4.5716, | |
| "step": 142848 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.765278639484841e-05, | |
| "loss": 4.5661, | |
| "step": 143360 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7644416826141616e-05, | |
| "loss": 4.5461, | |
| "step": 143872 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.763604725743483e-05, | |
| "loss": 4.5557, | |
| "step": 144384 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.762766130992431e-05, | |
| "loss": 4.5582, | |
| "step": 144896 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.761927536241379e-05, | |
| "loss": 4.5571, | |
| "step": 145408 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.761088941490327e-05, | |
| "loss": 4.5623, | |
| "step": 145920 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.760250346739275e-05, | |
| "loss": 4.5477, | |
| "step": 146432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.759411751988223e-05, | |
| "loss": 4.553, | |
| "step": 146944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.758573157237171e-05, | |
| "loss": 4.5582, | |
| "step": 147456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.757734562486119e-05, | |
| "loss": 4.5464, | |
| "step": 147968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.75689760561544e-05, | |
| "loss": 4.5466, | |
| "step": 148480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.756059010864388e-05, | |
| "loss": 4.5526, | |
| "step": 148992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.755220416113336e-05, | |
| "loss": 4.5358, | |
| "step": 149504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.754383459242657e-05, | |
| "loss": 4.541, | |
| "step": 150016 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.753544864491605e-05, | |
| "loss": 4.5459, | |
| "step": 150528 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7527062697405536e-05, | |
| "loss": 4.5322, | |
| "step": 151040 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7518676749895016e-05, | |
| "loss": 4.5394, | |
| "step": 151552 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7510290802384496e-05, | |
| "loss": 4.5423, | |
| "step": 152064 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7501921233677705e-05, | |
| "loss": 4.5391, | |
| "step": 152576 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 4.501157283782959, | |
| "eval_runtime": 295.2349, | |
| "eval_samples_per_second": 1292.5, | |
| "eval_steps_per_second": 40.392, | |
| "step": 152640 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7493535286167185e-05, | |
| "loss": 4.5248, | |
| "step": 153088 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7485149338656665e-05, | |
| "loss": 4.5264, | |
| "step": 153600 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7476763391146145e-05, | |
| "loss": 4.5456, | |
| "step": 154112 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7468377443635625e-05, | |
| "loss": 4.525, | |
| "step": 154624 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7459991496125105e-05, | |
| "loss": 4.5414, | |
| "step": 155136 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7451605548614585e-05, | |
| "loss": 4.521, | |
| "step": 155648 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7443219601104065e-05, | |
| "loss": 4.5366, | |
| "step": 156160 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7434833653593545e-05, | |
| "loss": 4.5176, | |
| "step": 156672 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7426447706083025e-05, | |
| "loss": 4.5283, | |
| "step": 157184 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7418061758572505e-05, | |
| "loss": 4.524, | |
| "step": 157696 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7409675811061985e-05, | |
| "loss": 4.5282, | |
| "step": 158208 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7401289863551465e-05, | |
| "loss": 4.5336, | |
| "step": 158720 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.739292029484468e-05, | |
| "loss": 4.5139, | |
| "step": 159232 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7384534347334154e-05, | |
| "loss": 4.5165, | |
| "step": 159744 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7376148399823634e-05, | |
| "loss": 4.5107, | |
| "step": 160256 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7367762452313114e-05, | |
| "loss": 4.5095, | |
| "step": 160768 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7359376504802594e-05, | |
| "loss": 4.5125, | |
| "step": 161280 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7350990557292074e-05, | |
| "loss": 4.5164, | |
| "step": 161792 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7342604609781554e-05, | |
| "loss": 4.511, | |
| "step": 162304 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7334218662271034e-05, | |
| "loss": 4.5335, | |
| "step": 162816 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7325832714760514e-05, | |
| "loss": 4.5125, | |
| "step": 163328 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7317446767249994e-05, | |
| "loss": 4.5184, | |
| "step": 163840 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7309060819739474e-05, | |
| "loss": 4.5104, | |
| "step": 164352 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7300674872228953e-05, | |
| "loss": 4.5194, | |
| "step": 164864 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.729230530352217e-05, | |
| "loss": 4.5091, | |
| "step": 165376 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.728391935601165e-05, | |
| "loss": 4.5036, | |
| "step": 165888 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.727553340850113e-05, | |
| "loss": 4.5065, | |
| "step": 166400 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.726716383979434e-05, | |
| "loss": 4.5019, | |
| "step": 166912 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.725877789228382e-05, | |
| "loss": 4.4872, | |
| "step": 167424 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.72503919447733e-05, | |
| "loss": 4.5022, | |
| "step": 167936 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.724200599726278e-05, | |
| "loss": 4.5084, | |
| "step": 168448 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.723362004975226e-05, | |
| "loss": 4.5033, | |
| "step": 168960 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.722523410224174e-05, | |
| "loss": 4.501, | |
| "step": 169472 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.721684815473121e-05, | |
| "loss": 4.5011, | |
| "step": 169984 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.720847858602443e-05, | |
| "loss": 4.4949, | |
| "step": 170496 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.720009263851391e-05, | |
| "loss": 4.4915, | |
| "step": 171008 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.719170669100339e-05, | |
| "loss": 4.4853, | |
| "step": 171520 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.718332074349287e-05, | |
| "loss": 4.4841, | |
| "step": 172032 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.717493479598235e-05, | |
| "loss": 4.4867, | |
| "step": 172544 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.716654884847183e-05, | |
| "loss": 4.4856, | |
| "step": 173056 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.715816290096131e-05, | |
| "loss": 4.4935, | |
| "step": 173568 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7149793332254516e-05, | |
| "loss": 4.4937, | |
| "step": 174080 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7141407384743996e-05, | |
| "loss": 4.4824, | |
| "step": 174592 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7133021437233476e-05, | |
| "loss": 4.4875, | |
| "step": 175104 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7124635489722956e-05, | |
| "loss": 4.4918, | |
| "step": 175616 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7116249542212436e-05, | |
| "loss": 4.4752, | |
| "step": 176128 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7107879973505645e-05, | |
| "loss": 4.488, | |
| "step": 176640 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7099494025995125e-05, | |
| "loss": 4.482, | |
| "step": 177152 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7091108078484605e-05, | |
| "loss": 4.4618, | |
| "step": 177664 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.708272213097409e-05, | |
| "loss": 4.484, | |
| "step": 178176 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.707433618346357e-05, | |
| "loss": 4.476, | |
| "step": 178688 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.706595023595305e-05, | |
| "loss": 4.4713, | |
| "step": 179200 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.705758066724626e-05, | |
| "loss": 4.467, | |
| "step": 179712 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.704919471973574e-05, | |
| "loss": 4.4705, | |
| "step": 180224 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.704080877222522e-05, | |
| "loss": 4.4588, | |
| "step": 180736 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.70324228247147e-05, | |
| "loss": 4.4879, | |
| "step": 181248 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.702403687720418e-05, | |
| "loss": 4.4663, | |
| "step": 181760 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.701565092969366e-05, | |
| "loss": 4.4757, | |
| "step": 182272 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.700726498218314e-05, | |
| "loss": 4.4764, | |
| "step": 182784 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.699887903467262e-05, | |
| "loss": 4.4603, | |
| "step": 183296 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.699050946596583e-05, | |
| "loss": 4.4531, | |
| "step": 183808 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.698212351845531e-05, | |
| "loss": 4.4687, | |
| "step": 184320 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.697373757094479e-05, | |
| "loss": 4.4556, | |
| "step": 184832 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6965351623434276e-05, | |
| "loss": 4.4501, | |
| "step": 185344 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6956965675923756e-05, | |
| "loss": 4.4697, | |
| "step": 185856 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6948596107216965e-05, | |
| "loss": 4.4685, | |
| "step": 186368 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6940210159706445e-05, | |
| "loss": 4.453, | |
| "step": 186880 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6931824212195925e-05, | |
| "loss": 4.4466, | |
| "step": 187392 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.69234382646854e-05, | |
| "loss": 4.4484, | |
| "step": 187904 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6915068695978614e-05, | |
| "loss": 4.4571, | |
| "step": 188416 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6906682748468094e-05, | |
| "loss": 4.4612, | |
| "step": 188928 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.68983131797613e-05, | |
| "loss": 4.4581, | |
| "step": 189440 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.688992723225078e-05, | |
| "loss": 4.4618, | |
| "step": 189952 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.688154128474026e-05, | |
| "loss": 4.4599, | |
| "step": 190464 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.687315533722974e-05, | |
| "loss": 4.4621, | |
| "step": 190976 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.686476938971923e-05, | |
| "loss": 4.4447, | |
| "step": 191488 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.68563834422087e-05, | |
| "loss": 4.4505, | |
| "step": 192000 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.684799749469818e-05, | |
| "loss": 4.4549, | |
| "step": 192512 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.683961154718766e-05, | |
| "loss": 4.4347, | |
| "step": 193024 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.683124197848087e-05, | |
| "loss": 4.4495, | |
| "step": 193536 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.682285603097035e-05, | |
| "loss": 4.4557, | |
| "step": 194048 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.681448646226357e-05, | |
| "loss": 4.454, | |
| "step": 194560 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.680610051475305e-05, | |
| "loss": 4.4393, | |
| "step": 195072 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.679771456724252e-05, | |
| "loss": 4.4268, | |
| "step": 195584 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6789328619732e-05, | |
| "loss": 4.4463, | |
| "step": 196096 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.678094267222148e-05, | |
| "loss": 4.4372, | |
| "step": 196608 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.677255672471097e-05, | |
| "loss": 4.4525, | |
| "step": 197120 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.676417077720045e-05, | |
| "loss": 4.4424, | |
| "step": 197632 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6755801208493657e-05, | |
| "loss": 4.4447, | |
| "step": 198144 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6747415260983136e-05, | |
| "loss": 4.4396, | |
| "step": 198656 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6739029313472616e-05, | |
| "loss": 4.429, | |
| "step": 199168 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6730643365962096e-05, | |
| "loss": 4.4384, | |
| "step": 199680 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6722257418451576e-05, | |
| "loss": 4.4359, | |
| "step": 200192 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6713887849744785e-05, | |
| "loss": 4.4466, | |
| "step": 200704 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6705501902234265e-05, | |
| "loss": 4.4483, | |
| "step": 201216 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6697115954723745e-05, | |
| "loss": 4.4391, | |
| "step": 201728 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6688730007213225e-05, | |
| "loss": 4.4268, | |
| "step": 202240 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6680344059702705e-05, | |
| "loss": 4.4306, | |
| "step": 202752 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.667197449099592e-05, | |
| "loss": 4.4308, | |
| "step": 203264 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.66635885434854e-05, | |
| "loss": 4.4381, | |
| "step": 203776 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.665520259597488e-05, | |
| "loss": 4.4259, | |
| "step": 204288 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.664681664846436e-05, | |
| "loss": 4.429, | |
| "step": 204800 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.663843070095384e-05, | |
| "loss": 4.4182, | |
| "step": 205312 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.663004475344332e-05, | |
| "loss": 4.4377, | |
| "step": 205824 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.66216588059328e-05, | |
| "loss": 4.4097, | |
| "step": 206336 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.661327285842228e-05, | |
| "loss": 4.4255, | |
| "step": 206848 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.660490328971549e-05, | |
| "loss": 4.4168, | |
| "step": 207360 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.659651734220497e-05, | |
| "loss": 4.4227, | |
| "step": 207872 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.658814777349818e-05, | |
| "loss": 4.4189, | |
| "step": 208384 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.657976182598766e-05, | |
| "loss": 4.4286, | |
| "step": 208896 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.657137587847714e-05, | |
| "loss": 4.4218, | |
| "step": 209408 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.656298993096662e-05, | |
| "loss": 4.4156, | |
| "step": 209920 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.65546039834561e-05, | |
| "loss": 4.4232, | |
| "step": 210432 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6546218035945586e-05, | |
| "loss": 4.4093, | |
| "step": 210944 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6537832088435065e-05, | |
| "loss": 4.4272, | |
| "step": 211456 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6529462519728275e-05, | |
| "loss": 4.4325, | |
| "step": 211968 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6521076572217755e-05, | |
| "loss": 4.4164, | |
| "step": 212480 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6512690624707234e-05, | |
| "loss": 4.4114, | |
| "step": 212992 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.650430467719671e-05, | |
| "loss": 4.4163, | |
| "step": 213504 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.649591872968619e-05, | |
| "loss": 4.4182, | |
| "step": 214016 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.648753278217567e-05, | |
| "loss": 4.4116, | |
| "step": 214528 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.647914683466515e-05, | |
| "loss": 4.409, | |
| "step": 215040 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.647076088715463e-05, | |
| "loss": 4.4077, | |
| "step": 215552 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6462391318447837e-05, | |
| "loss": 4.4228, | |
| "step": 216064 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.645400537093732e-05, | |
| "loss": 4.4132, | |
| "step": 216576 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.64456194234268e-05, | |
| "loss": 4.4082, | |
| "step": 217088 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.643723347591628e-05, | |
| "loss": 4.3981, | |
| "step": 217600 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.642886390720949e-05, | |
| "loss": 4.4092, | |
| "step": 218112 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.642049433850271e-05, | |
| "loss": 4.3982, | |
| "step": 218624 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.641210839099218e-05, | |
| "loss": 4.4197, | |
| "step": 219136 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.640372244348166e-05, | |
| "loss": 4.4142, | |
| "step": 219648 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.639533649597114e-05, | |
| "loss": 4.4004, | |
| "step": 220160 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.638695054846062e-05, | |
| "loss": 4.3987, | |
| "step": 220672 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.63785646009501e-05, | |
| "loss": 4.4089, | |
| "step": 221184 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.637017865343958e-05, | |
| "loss": 4.4054, | |
| "step": 221696 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.636180908473279e-05, | |
| "loss": 4.416, | |
| "step": 222208 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.635342313722228e-05, | |
| "loss": 4.3962, | |
| "step": 222720 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.634503718971176e-05, | |
| "loss": 4.409, | |
| "step": 223232 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.633665124220124e-05, | |
| "loss": 4.4051, | |
| "step": 223744 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.632826529469072e-05, | |
| "loss": 4.4023, | |
| "step": 224256 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6319895725983926e-05, | |
| "loss": 4.4018, | |
| "step": 224768 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6311509778473406e-05, | |
| "loss": 4.4052, | |
| "step": 225280 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6303123830962886e-05, | |
| "loss": 4.3922, | |
| "step": 225792 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6294737883452366e-05, | |
| "loss": 4.3982, | |
| "step": 226304 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6286351935941846e-05, | |
| "loss": 4.3989, | |
| "step": 226816 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6277965988431326e-05, | |
| "loss": 4.3928, | |
| "step": 227328 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6269580040920806e-05, | |
| "loss": 4.392, | |
| "step": 227840 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6261194093410286e-05, | |
| "loss": 4.3987, | |
| "step": 228352 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6252824524703495e-05, | |
| "loss": 4.3969, | |
| "step": 228864 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 4.366720676422119, | |
| "eval_runtime": 289.5048, | |
| "eval_samples_per_second": 1318.082, | |
| "eval_steps_per_second": 41.191, | |
| "step": 228960 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6244438577192975e-05, | |
| "loss": 4.3889, | |
| "step": 229376 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.623605262968246e-05, | |
| "loss": 4.3823, | |
| "step": 229888 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.622766668217194e-05, | |
| "loss": 4.4056, | |
| "step": 230400 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.621929711346515e-05, | |
| "loss": 4.3848, | |
| "step": 230912 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.621091116595463e-05, | |
| "loss": 4.4053, | |
| "step": 231424 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.620252521844411e-05, | |
| "loss": 4.3792, | |
| "step": 231936 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.619413927093359e-05, | |
| "loss": 4.3985, | |
| "step": 232448 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.61857697022268e-05, | |
| "loss": 4.3824, | |
| "step": 232960 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.617738375471628e-05, | |
| "loss": 4.3863, | |
| "step": 233472 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.616899780720576e-05, | |
| "loss": 4.3893, | |
| "step": 233984 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.616061185969524e-05, | |
| "loss": 4.3889, | |
| "step": 234496 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.615224229098845e-05, | |
| "loss": 4.3956, | |
| "step": 235008 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6143872722281664e-05, | |
| "loss": 4.3801, | |
| "step": 235520 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6135486774771144e-05, | |
| "loss": 4.3781, | |
| "step": 236032 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6127100827260624e-05, | |
| "loss": 4.3828, | |
| "step": 236544 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6118714879750104e-05, | |
| "loss": 4.3699, | |
| "step": 237056 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6110328932239584e-05, | |
| "loss": 4.3798, | |
| "step": 237568 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6101942984729064e-05, | |
| "loss": 4.3834, | |
| "step": 238080 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6093557037218544e-05, | |
| "loss": 4.3773, | |
| "step": 238592 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.608517108970802e-05, | |
| "loss": 4.3973, | |
| "step": 239104 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.607680152100123e-05, | |
| "loss": 4.3847, | |
| "step": 239616 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.606841557349071e-05, | |
| "loss": 4.3893, | |
| "step": 240128 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.606002962598019e-05, | |
| "loss": 4.3779, | |
| "step": 240640 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6051643678469666e-05, | |
| "loss": 4.3846, | |
| "step": 241152 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.604325773095915e-05, | |
| "loss": 4.3797, | |
| "step": 241664 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.603487178344863e-05, | |
| "loss": 4.3739, | |
| "step": 242176 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.602648583593811e-05, | |
| "loss": 4.3773, | |
| "step": 242688 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.601809988842759e-05, | |
| "loss": 4.3702, | |
| "step": 243200 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.600974669852454e-05, | |
| "loss": 4.3643, | |
| "step": 243712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.600136075101402e-05, | |
| "loss": 4.3692, | |
| "step": 244224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.599297480350349e-05, | |
| "loss": 4.3771, | |
| "step": 244736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.598458885599297e-05, | |
| "loss": 4.377, | |
| "step": 245248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.597620290848245e-05, | |
| "loss": 4.3738, | |
| "step": 245760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.596783333977567e-05, | |
| "loss": 4.3764, | |
| "step": 246272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.595944739226514e-05, | |
| "loss": 4.3613, | |
| "step": 246784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.595106144475462e-05, | |
| "loss": 4.3664, | |
| "step": 247296 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.59426754972441e-05, | |
| "loss": 4.3655, | |
| "step": 247808 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5934289549733587e-05, | |
| "loss": 4.3552, | |
| "step": 248320 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5925919981026796e-05, | |
| "loss": 4.3642, | |
| "step": 248832 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5917534033516276e-05, | |
| "loss": 4.3588, | |
| "step": 249344 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5909148086005756e-05, | |
| "loss": 4.3704, | |
| "step": 249856 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5900762138495235e-05, | |
| "loss": 4.3693, | |
| "step": 250368 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5892376190984715e-05, | |
| "loss": 4.3644, | |
| "step": 250880 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5883990243474195e-05, | |
| "loss": 4.361, | |
| "step": 251392 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5875604295963675e-05, | |
| "loss": 4.3729, | |
| "step": 251904 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5867218348453155e-05, | |
| "loss": 4.3544, | |
| "step": 252416 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5858865158550094e-05, | |
| "loss": 4.3659, | |
| "step": 252928 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5850479211039573e-05, | |
| "loss": 4.3599, | |
| "step": 253440 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5842093263529053e-05, | |
| "loss": 4.3423, | |
| "step": 253952 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.583370731601854e-05, | |
| "loss": 4.3669, | |
| "step": 254464 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.582532136850802e-05, | |
| "loss": 4.352, | |
| "step": 254976 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.58169354209975e-05, | |
| "loss": 4.3565, | |
| "step": 255488 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.580854947348698e-05, | |
| "loss": 4.3435, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.580016352597646e-05, | |
| "loss": 4.3514, | |
| "step": 256512 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.579179395726967e-05, | |
| "loss": 4.3398, | |
| "step": 257024 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.578340800975915e-05, | |
| "loss": 4.3646, | |
| "step": 257536 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.577502206224863e-05, | |
| "loss": 4.3506, | |
| "step": 258048 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.576663611473811e-05, | |
| "loss": 4.3556, | |
| "step": 258560 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.575826654603132e-05, | |
| "loss": 4.3605, | |
| "step": 259072 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.57498805985208e-05, | |
| "loss": 4.3462, | |
| "step": 259584 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.574149465101028e-05, | |
| "loss": 4.3347, | |
| "step": 260096 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.573310870349976e-05, | |
| "loss": 4.3557, | |
| "step": 260608 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.572472275598924e-05, | |
| "loss": 4.3381, | |
| "step": 261120 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5716353187282454e-05, | |
| "loss": 4.3373, | |
| "step": 261632 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5707967239771934e-05, | |
| "loss": 4.354, | |
| "step": 262144 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5699581292261414e-05, | |
| "loss": 4.3518, | |
| "step": 262656 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5691195344750894e-05, | |
| "loss": 4.3405, | |
| "step": 263168 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5682809397240374e-05, | |
| "loss": 4.3363, | |
| "step": 263680 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5674423449729854e-05, | |
| "loss": 4.331, | |
| "step": 264192 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.566603750221933e-05, | |
| "loss": 4.341, | |
| "step": 264704 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.565765155470881e-05, | |
| "loss": 4.349, | |
| "step": 265216 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.564928198600202e-05, | |
| "loss": 4.3481, | |
| "step": 265728 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5640896038491496e-05, | |
| "loss": 4.348, | |
| "step": 266240 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.563252646978471e-05, | |
| "loss": 4.3436, | |
| "step": 266752 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.562414052227419e-05, | |
| "loss": 4.3567, | |
| "step": 267264 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.561575457476368e-05, | |
| "loss": 4.3319, | |
| "step": 267776 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.560736862725315e-05, | |
| "loss": 4.3445, | |
| "step": 268288 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.559898267974263e-05, | |
| "loss": 4.3403, | |
| "step": 268800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.559059673223211e-05, | |
| "loss": 4.3236, | |
| "step": 269312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.558222716352533e-05, | |
| "loss": 4.3454, | |
| "step": 269824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.55738412160148e-05, | |
| "loss": 4.3447, | |
| "step": 270336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.556545526850428e-05, | |
| "loss": 4.34, | |
| "step": 270848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.555706932099376e-05, | |
| "loss": 4.3322, | |
| "step": 271360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.554868337348324e-05, | |
| "loss": 4.3233, | |
| "step": 271872 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.554029742597272e-05, | |
| "loss": 4.3375, | |
| "step": 272384 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.553192785726593e-05, | |
| "loss": 4.3349, | |
| "step": 272896 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5523541909755416e-05, | |
| "loss": 4.3414, | |
| "step": 273408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5515155962244896e-05, | |
| "loss": 4.3309, | |
| "step": 273920 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5506770014734376e-05, | |
| "loss": 4.3407, | |
| "step": 274432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5498384067223856e-05, | |
| "loss": 4.3276, | |
| "step": 274944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5490014498517065e-05, | |
| "loss": 4.3284, | |
| "step": 275456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5481628551006545e-05, | |
| "loss": 4.3281, | |
| "step": 275968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5473242603496025e-05, | |
| "loss": 4.3271, | |
| "step": 276480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5464856655985505e-05, | |
| "loss": 4.344, | |
| "step": 276992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5456470708474985e-05, | |
| "loss": 4.3401, | |
| "step": 277504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5448084760964465e-05, | |
| "loss": 4.3402, | |
| "step": 278016 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5439698813453945e-05, | |
| "loss": 4.3196, | |
| "step": 278528 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5431329244747154e-05, | |
| "loss": 4.3279, | |
| "step": 279040 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5422943297236634e-05, | |
| "loss": 4.3273, | |
| "step": 279552 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5414557349726114e-05, | |
| "loss": 4.3318, | |
| "step": 280064 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5406171402215594e-05, | |
| "loss": 4.325, | |
| "step": 280576 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.539778545470508e-05, | |
| "loss": 4.3228, | |
| "step": 281088 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.538941588599829e-05, | |
| "loss": 4.3162, | |
| "step": 281600 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.538102993848777e-05, | |
| "loss": 4.3378, | |
| "step": 282112 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.537264399097725e-05, | |
| "loss": 4.3069, | |
| "step": 282624 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.536425804346673e-05, | |
| "loss": 4.3214, | |
| "step": 283136 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.535588847475994e-05, | |
| "loss": 4.3184, | |
| "step": 283648 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.534750252724942e-05, | |
| "loss": 4.3202, | |
| "step": 284160 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.53391165797389e-05, | |
| "loss": 4.3169, | |
| "step": 284672 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.533073063222838e-05, | |
| "loss": 4.3299, | |
| "step": 285184 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.532234468471786e-05, | |
| "loss": 4.3208, | |
| "step": 285696 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.531395873720733e-05, | |
| "loss": 4.3117, | |
| "step": 286208 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.530557278969682e-05, | |
| "loss": 4.3274, | |
| "step": 286720 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.52971868421863e-05, | |
| "loss": 4.3115, | |
| "step": 287232 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5288817273479514e-05, | |
| "loss": 4.3247, | |
| "step": 287744 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.528043132596899e-05, | |
| "loss": 4.3332, | |
| "step": 288256 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.527204537845847e-05, | |
| "loss": 4.3209, | |
| "step": 288768 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.526367580975168e-05, | |
| "loss": 4.3148, | |
| "step": 289280 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.525528986224116e-05, | |
| "loss": 4.3148, | |
| "step": 289792 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5246903914730636e-05, | |
| "loss": 4.3177, | |
| "step": 290304 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5238517967220116e-05, | |
| "loss": 4.3148, | |
| "step": 290816 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5230132019709596e-05, | |
| "loss": 4.3174, | |
| "step": 291328 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5221746072199076e-05, | |
| "loss": 4.3039, | |
| "step": 291840 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5213360124688556e-05, | |
| "loss": 4.3252, | |
| "step": 292352 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5204974177178036e-05, | |
| "loss": 4.3191, | |
| "step": 292864 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.519660460847125e-05, | |
| "loss": 4.314, | |
| "step": 293376 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.518821866096073e-05, | |
| "loss": 4.2988, | |
| "step": 293888 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.517983271345021e-05, | |
| "loss": 4.3145, | |
| "step": 294400 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.517144676593969e-05, | |
| "loss": 4.3048, | |
| "step": 294912 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.51630771972329e-05, | |
| "loss": 4.3191, | |
| "step": 295424 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.515469124972238e-05, | |
| "loss": 4.3221, | |
| "step": 295936 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.514630530221186e-05, | |
| "loss": 4.3113, | |
| "step": 296448 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.513791935470134e-05, | |
| "loss": 4.2971, | |
| "step": 296960 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512954978599455e-05, | |
| "loss": 4.3141, | |
| "step": 297472 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512116383848403e-05, | |
| "loss": 4.3074, | |
| "step": 297984 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.511277789097351e-05, | |
| "loss": 4.3235, | |
| "step": 298496 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.510439194346299e-05, | |
| "loss": 4.2996, | |
| "step": 299008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.509600599595247e-05, | |
| "loss": 4.3125, | |
| "step": 299520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5087620048441956e-05, | |
| "loss": 4.3132, | |
| "step": 300032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5079234100931436e-05, | |
| "loss": 4.3147, | |
| "step": 300544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5070848153420916e-05, | |
| "loss": 4.3085, | |
| "step": 301056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5062478584714125e-05, | |
| "loss": 4.3104, | |
| "step": 301568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5054092637203605e-05, | |
| "loss": 4.2986, | |
| "step": 302080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5045706689693085e-05, | |
| "loss": 4.3088, | |
| "step": 302592 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5037320742182565e-05, | |
| "loss": 4.3063, | |
| "step": 303104 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5028951173475774e-05, | |
| "loss": 4.3043, | |
| "step": 303616 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5020565225965254e-05, | |
| "loss": 4.2991, | |
| "step": 304128 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5012179278454734e-05, | |
| "loss": 4.3092, | |
| "step": 304640 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5003793330944214e-05, | |
| "loss": 4.3045, | |
| "step": 305152 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 4.283578872680664, | |
| "eval_runtime": 289.1657, | |
| "eval_samples_per_second": 1319.628, | |
| "eval_steps_per_second": 41.239, | |
| "step": 305280 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4995407383433694e-05, | |
| "loss": 4.2952, | |
| "step": 305664 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4987021435923174e-05, | |
| "loss": 4.2927, | |
| "step": 306176 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4978635488412654e-05, | |
| "loss": 4.3131, | |
| "step": 306688 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4970249540902134e-05, | |
| "loss": 4.2974, | |
| "step": 307200 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4961863593391614e-05, | |
| "loss": 4.3094, | |
| "step": 307712 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4953477645881094e-05, | |
| "loss": 4.2973, | |
| "step": 308224 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4945091698370574e-05, | |
| "loss": 4.3028, | |
| "step": 308736 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4936705750860054e-05, | |
| "loss": 4.2954, | |
| "step": 309248 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4928319803349534e-05, | |
| "loss": 4.2965, | |
| "step": 309760 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4919933855839014e-05, | |
| "loss": 4.3007, | |
| "step": 310272 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4911547908328494e-05, | |
| "loss": 4.3013, | |
| "step": 310784 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4903161960817974e-05, | |
| "loss": 4.3081, | |
| "step": 311296 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.489479239211118e-05, | |
| "loss": 4.2918, | |
| "step": 311808 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.488640644460066e-05, | |
| "loss": 4.2911, | |
| "step": 312320 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.487802049709014e-05, | |
| "loss": 4.2929, | |
| "step": 312832 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.486963454957962e-05, | |
| "loss": 4.2816, | |
| "step": 313344 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.48612486020691e-05, | |
| "loss": 4.2918, | |
| "step": 313856 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.485286265455859e-05, | |
| "loss": 4.2952, | |
| "step": 314368 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.484447670704806e-05, | |
| "loss": 4.29, | |
| "step": 314880 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.483609075953754e-05, | |
| "loss": 4.311, | |
| "step": 315392 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.482770481202702e-05, | |
| "loss": 4.2976, | |
| "step": 315904 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.48193188645165e-05, | |
| "loss": 4.3043, | |
| "step": 316416 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.481094929580971e-05, | |
| "loss": 4.294, | |
| "step": 316928 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.480256334829919e-05, | |
| "loss": 4.3001, | |
| "step": 317440 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.479417740078867e-05, | |
| "loss": 4.29, | |
| "step": 317952 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.478579145327815e-05, | |
| "loss": 4.2928, | |
| "step": 318464 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.477740550576763e-05, | |
| "loss": 4.2898, | |
| "step": 318976 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.476901955825711e-05, | |
| "loss": 4.2864, | |
| "step": 319488 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.476063361074659e-05, | |
| "loss": 4.2821, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.475224766323607e-05, | |
| "loss": 4.2835, | |
| "step": 320512 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.474387809452929e-05, | |
| "loss": 4.2897, | |
| "step": 321024 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.473549214701877e-05, | |
| "loss": 4.2953, | |
| "step": 321536 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.472710619950825e-05, | |
| "loss": 4.2896, | |
| "step": 322048 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.471872025199773e-05, | |
| "loss": 4.2921, | |
| "step": 322560 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4710350683290936e-05, | |
| "loss": 4.2777, | |
| "step": 323072 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4701964735780416e-05, | |
| "loss": 4.2841, | |
| "step": 323584 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4693578788269896e-05, | |
| "loss": 4.2826, | |
| "step": 324096 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4685192840759376e-05, | |
| "loss": 4.2713, | |
| "step": 324608 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4676806893248856e-05, | |
| "loss": 4.2843, | |
| "step": 325120 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4668453703345794e-05, | |
| "loss": 4.274, | |
| "step": 325632 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.466006775583528e-05, | |
| "loss": 4.2891, | |
| "step": 326144 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.465168180832476e-05, | |
| "loss": 4.2866, | |
| "step": 326656 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.464329586081424e-05, | |
| "loss": 4.2819, | |
| "step": 327168 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.463490991330372e-05, | |
| "loss": 4.2746, | |
| "step": 327680 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.46265239657932e-05, | |
| "loss": 4.2928, | |
| "step": 328192 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.461813801828268e-05, | |
| "loss": 4.272, | |
| "step": 328704 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.460975207077216e-05, | |
| "loss": 4.2865, | |
| "step": 329216 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.460138250206537e-05, | |
| "loss": 4.2761, | |
| "step": 329728 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.459299655455485e-05, | |
| "loss": 4.2603, | |
| "step": 330240 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.458461060704433e-05, | |
| "loss": 4.2859, | |
| "step": 330752 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.457622465953381e-05, | |
| "loss": 4.2719, | |
| "step": 331264 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.456783871202329e-05, | |
| "loss": 4.2742, | |
| "step": 331776 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.455945276451277e-05, | |
| "loss": 4.2676, | |
| "step": 332288 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.455108319580598e-05, | |
| "loss": 4.2663, | |
| "step": 332800 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4542697248295465e-05, | |
| "loss": 4.265, | |
| "step": 333312 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4534327679588674e-05, | |
| "loss": 4.2775, | |
| "step": 333824 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4525941732078154e-05, | |
| "loss": 4.2743, | |
| "step": 334336 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.451757216337136e-05, | |
| "loss": 4.2762, | |
| "step": 334848 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.450918621586084e-05, | |
| "loss": 4.2811, | |
| "step": 335360 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.450080026835032e-05, | |
| "loss": 4.2695, | |
| "step": 335872 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.44924143208398e-05, | |
| "loss": 4.2562, | |
| "step": 336384 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.448402837332928e-05, | |
| "loss": 4.2777, | |
| "step": 336896 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.447564242581876e-05, | |
| "loss": 4.2611, | |
| "step": 337408 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.446725647830824e-05, | |
| "loss": 4.2558, | |
| "step": 337920 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4458870530797716e-05, | |
| "loss": 4.2786, | |
| "step": 338432 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.44504845832872e-05, | |
| "loss": 4.275, | |
| "step": 338944 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.444211501458041e-05, | |
| "loss": 4.2636, | |
| "step": 339456 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.44337290670699e-05, | |
| "loss": 4.2599, | |
| "step": 339968 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.442534311955937e-05, | |
| "loss": 4.2511, | |
| "step": 340480 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.441695717204885e-05, | |
| "loss": 4.2637, | |
| "step": 340992 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.440857122453833e-05, | |
| "loss": 4.2746, | |
| "step": 341504 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.440018527702781e-05, | |
| "loss": 4.2705, | |
| "step": 342016 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.439179932951729e-05, | |
| "loss": 4.2683, | |
| "step": 342528 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.438341338200677e-05, | |
| "loss": 4.2664, | |
| "step": 343040 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.437504381329998e-05, | |
| "loss": 4.2836, | |
| "step": 343552 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.436665786578946e-05, | |
| "loss": 4.2559, | |
| "step": 344064 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.435827191827894e-05, | |
| "loss": 4.2696, | |
| "step": 344576 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.434988597076842e-05, | |
| "loss": 4.2667, | |
| "step": 345088 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.434151640206164e-05, | |
| "loss": 4.242, | |
| "step": 345600 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4333130454551117e-05, | |
| "loss": 4.273, | |
| "step": 346112 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4324744507040597e-05, | |
| "loss": 4.2668, | |
| "step": 346624 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4316358559530076e-05, | |
| "loss": 4.2655, | |
| "step": 347136 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4307988990823286e-05, | |
| "loss": 4.2595, | |
| "step": 347648 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4299603043312766e-05, | |
| "loss": 4.2449, | |
| "step": 348160 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4291233474605975e-05, | |
| "loss": 4.2619, | |
| "step": 348672 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4282847527095455e-05, | |
| "loss": 4.2599, | |
| "step": 349184 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4274461579584935e-05, | |
| "loss": 4.2684, | |
| "step": 349696 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4266075632074415e-05, | |
| "loss": 4.2563, | |
| "step": 350208 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4257689684563894e-05, | |
| "loss": 4.2646, | |
| "step": 350720 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4249303737053374e-05, | |
| "loss": 4.2539, | |
| "step": 351232 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.424093416834659e-05, | |
| "loss": 4.2571, | |
| "step": 351744 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.423254822083607e-05, | |
| "loss": 4.2528, | |
| "step": 352256 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.422416227332555e-05, | |
| "loss": 4.2526, | |
| "step": 352768 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.421577632581503e-05, | |
| "loss": 4.2698, | |
| "step": 353280 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.420739037830451e-05, | |
| "loss": 4.2666, | |
| "step": 353792 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.419900443079399e-05, | |
| "loss": 4.2709, | |
| "step": 354304 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.419061848328347e-05, | |
| "loss": 4.2437, | |
| "step": 354816 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.418223253577295e-05, | |
| "loss": 4.2552, | |
| "step": 355328 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.417384658826243e-05, | |
| "loss": 4.2551, | |
| "step": 355840 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.416549339835937e-05, | |
| "loss": 4.2588, | |
| "step": 356352 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.415710745084885e-05, | |
| "loss": 4.2563, | |
| "step": 356864 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.414872150333833e-05, | |
| "loss": 4.2482, | |
| "step": 357376 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.414033555582781e-05, | |
| "loss": 4.2473, | |
| "step": 357888 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.413194960831729e-05, | |
| "loss": 4.2641, | |
| "step": 358400 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4123563660806775e-05, | |
| "loss": 4.2362, | |
| "step": 358912 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4115177713296255e-05, | |
| "loss": 4.2504, | |
| "step": 359424 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4106791765785735e-05, | |
| "loss": 4.2429, | |
| "step": 359936 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4098422197078944e-05, | |
| "loss": 4.2551, | |
| "step": 360448 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.409005262837215e-05, | |
| "loss": 4.2465, | |
| "step": 360960 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.408166668086163e-05, | |
| "loss": 4.2559, | |
| "step": 361472 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.407328073335111e-05, | |
| "loss": 4.2511, | |
| "step": 361984 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.406489478584059e-05, | |
| "loss": 4.2421, | |
| "step": 362496 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.405650883833007e-05, | |
| "loss": 4.2569, | |
| "step": 363008 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.404813926962328e-05, | |
| "loss": 4.2432, | |
| "step": 363520 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.403975332211276e-05, | |
| "loss": 4.2556, | |
| "step": 364032 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.403136737460224e-05, | |
| "loss": 4.261, | |
| "step": 364544 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.402298142709173e-05, | |
| "loss": 4.2496, | |
| "step": 365056 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.40145954795812e-05, | |
| "loss": 4.2446, | |
| "step": 365568 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.400620953207068e-05, | |
| "loss": 4.247, | |
| "step": 366080 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.39978399633639e-05, | |
| "loss": 4.2464, | |
| "step": 366592 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.398945401585338e-05, | |
| "loss": 4.2472, | |
| "step": 367104 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.398106806834285e-05, | |
| "loss": 4.2479, | |
| "step": 367616 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.397268212083233e-05, | |
| "loss": 4.2385, | |
| "step": 368128 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.396429617332181e-05, | |
| "loss": 4.2559, | |
| "step": 368640 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.395591022581129e-05, | |
| "loss": 4.2493, | |
| "step": 369152 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.39475406571045e-05, | |
| "loss": 4.2494, | |
| "step": 369664 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.393915470959398e-05, | |
| "loss": 4.2331, | |
| "step": 370176 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3930768762083466e-05, | |
| "loss": 4.244, | |
| "step": 370688 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3922382814572946e-05, | |
| "loss": 4.2363, | |
| "step": 371200 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3913996867062426e-05, | |
| "loss": 4.2493, | |
| "step": 371712 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3905610919551906e-05, | |
| "loss": 4.252, | |
| "step": 372224 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3897224972041386e-05, | |
| "loss": 4.2466, | |
| "step": 372736 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3888839024530866e-05, | |
| "loss": 4.2279, | |
| "step": 373248 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3880469455824075e-05, | |
| "loss": 4.249, | |
| "step": 373760 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3872083508313555e-05, | |
| "loss": 4.2417, | |
| "step": 374272 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3863697560803035e-05, | |
| "loss": 4.2538, | |
| "step": 374784 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3855311613292515e-05, | |
| "loss": 4.2329, | |
| "step": 375296 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3846942044585724e-05, | |
| "loss": 4.2476, | |
| "step": 375808 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3838556097075204e-05, | |
| "loss": 4.2448, | |
| "step": 376320 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3830170149564684e-05, | |
| "loss": 4.2477, | |
| "step": 376832 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3821784202054164e-05, | |
| "loss": 4.2408, | |
| "step": 377344 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3813398254543644e-05, | |
| "loss": 4.2429, | |
| "step": 377856 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.380501230703313e-05, | |
| "loss": 4.2313, | |
| "step": 378368 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.379662635952261e-05, | |
| "loss": 4.243, | |
| "step": 378880 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.378824041201209e-05, | |
| "loss": 4.2408, | |
| "step": 379392 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.37798708433053e-05, | |
| "loss": 4.2386, | |
| "step": 379904 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.377148489579478e-05, | |
| "loss": 4.2339, | |
| "step": 380416 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.376311532708799e-05, | |
| "loss": 4.2408, | |
| "step": 380928 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.375472937957747e-05, | |
| "loss": 4.2419, | |
| "step": 381440 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 4.226221561431885, | |
| "eval_runtime": 313.8499, | |
| "eval_samples_per_second": 1215.839, | |
| "eval_steps_per_second": 37.996, | |
| "step": 381600 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.374634343206695e-05, | |
| "loss": 4.2363, | |
| "step": 381952 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.373795748455643e-05, | |
| "loss": 4.2276, | |
| "step": 382464 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.372957153704591e-05, | |
| "loss": 4.2467, | |
| "step": 382976 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.372118558953539e-05, | |
| "loss": 4.2317, | |
| "step": 383488 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.371279964202487e-05, | |
| "loss": 4.2425, | |
| "step": 384000 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.370441369451435e-05, | |
| "loss": 4.2353, | |
| "step": 384512 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3696044125807564e-05, | |
| "loss": 4.2354, | |
| "step": 385024 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.368765817829704e-05, | |
| "loss": 4.2354, | |
| "step": 385536 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.367928860959025e-05, | |
| "loss": 4.2322, | |
| "step": 386048 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.367091904088346e-05, | |
| "loss": 4.2366, | |
| "step": 386560 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.366253309337294e-05, | |
| "loss": 4.2404, | |
| "step": 387072 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.365414714586242e-05, | |
| "loss": 4.2396, | |
| "step": 387584 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.36457611983519e-05, | |
| "loss": 4.23, | |
| "step": 388096 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.363737525084138e-05, | |
| "loss": 4.2267, | |
| "step": 388608 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.362898930333086e-05, | |
| "loss": 4.2304, | |
| "step": 389120 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3620603355820335e-05, | |
| "loss": 4.2144, | |
| "step": 389632 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.361221740830982e-05, | |
| "loss": 4.2298, | |
| "step": 390144 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.360384783960304e-05, | |
| "loss": 4.2297, | |
| "step": 390656 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.359546189209251e-05, | |
| "loss": 4.2289, | |
| "step": 391168 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.358707594458199e-05, | |
| "loss": 4.2471, | |
| "step": 391680 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.357868999707147e-05, | |
| "loss": 4.2371, | |
| "step": 392192 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.357030404956095e-05, | |
| "loss": 4.2418, | |
| "step": 392704 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.356191810205043e-05, | |
| "loss": 4.2327, | |
| "step": 393216 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.355354853334364e-05, | |
| "loss": 4.2333, | |
| "step": 393728 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.354516258583312e-05, | |
| "loss": 4.229, | |
| "step": 394240 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.35367766383226e-05, | |
| "loss": 4.2301, | |
| "step": 394752 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.352839069081208e-05, | |
| "loss": 4.2259, | |
| "step": 395264 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.352000474330156e-05, | |
| "loss": 4.227, | |
| "step": 395776 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.351161879579104e-05, | |
| "loss": 4.2217, | |
| "step": 396288 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.350323284828052e-05, | |
| "loss": 4.2237, | |
| "step": 396800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3494846900770006e-05, | |
| "loss": 4.2269, | |
| "step": 397312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3486477332063216e-05, | |
| "loss": 4.2327, | |
| "step": 397824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3478091384552696e-05, | |
| "loss": 4.2276, | |
| "step": 398336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3469705437042175e-05, | |
| "loss": 4.2332, | |
| "step": 398848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3461319489531655e-05, | |
| "loss": 4.2162, | |
| "step": 399360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3452949920824865e-05, | |
| "loss": 4.2246, | |
| "step": 399872 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3444563973314344e-05, | |
| "loss": 4.2216, | |
| "step": 400384 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3436178025803824e-05, | |
| "loss": 4.2117, | |
| "step": 400896 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3427792078293304e-05, | |
| "loss": 4.2251, | |
| "step": 401408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3419422509586514e-05, | |
| "loss": 4.2092, | |
| "step": 401920 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3411036562075993e-05, | |
| "loss": 4.2263, | |
| "step": 402432 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3402650614565473e-05, | |
| "loss": 4.2268, | |
| "step": 402944 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.339429742466242e-05, | |
| "loss": 4.226, | |
| "step": 403456 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.33859114771519e-05, | |
| "loss": 4.219, | |
| "step": 403968 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.337752552964138e-05, | |
| "loss": 4.2316, | |
| "step": 404480 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.336913958213086e-05, | |
| "loss": 4.2112, | |
| "step": 404992 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.336077001342407e-05, | |
| "loss": 4.2304, | |
| "step": 405504 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.335238406591355e-05, | |
| "loss": 4.2167, | |
| "step": 406016 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.334399811840303e-05, | |
| "loss": 4.1958, | |
| "step": 406528 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.333561217089251e-05, | |
| "loss": 4.2308, | |
| "step": 407040 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.332722622338199e-05, | |
| "loss": 4.2125, | |
| "step": 407552 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.331884027587147e-05, | |
| "loss": 4.2197, | |
| "step": 408064 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.331045432836095e-05, | |
| "loss": 4.2062, | |
| "step": 408576 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.330206838085043e-05, | |
| "loss": 4.2085, | |
| "step": 409088 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3293682433339914e-05, | |
| "loss": 4.2118, | |
| "step": 409600 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3285296485829394e-05, | |
| "loss": 4.2142, | |
| "step": 410112 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3276910538318874e-05, | |
| "loss": 4.2187, | |
| "step": 410624 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.326852459080835e-05, | |
| "loss": 4.2153, | |
| "step": 411136 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.326013864329783e-05, | |
| "loss": 4.2259, | |
| "step": 411648 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.325175269578731e-05, | |
| "loss": 4.2097, | |
| "step": 412160 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.324336674827679e-05, | |
| "loss": 4.1991, | |
| "step": 412672 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3234997179569996e-05, | |
| "loss": 4.2188, | |
| "step": 413184 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.322662761086321e-05, | |
| "loss": 4.1981, | |
| "step": 413696 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.321824166335269e-05, | |
| "loss": 4.2006, | |
| "step": 414208 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.32098720946459e-05, | |
| "loss": 4.22, | |
| "step": 414720 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.320150252593912e-05, | |
| "loss": 4.2183, | |
| "step": 415232 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.31931165784286e-05, | |
| "loss": 4.204, | |
| "step": 415744 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3184730630918077e-05, | |
| "loss": 4.2064, | |
| "step": 416256 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3176344683407557e-05, | |
| "loss": 4.195, | |
| "step": 416768 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3167958735897036e-05, | |
| "loss": 4.2059, | |
| "step": 417280 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3159572788386516e-05, | |
| "loss": 4.2162, | |
| "step": 417792 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3151186840875996e-05, | |
| "loss": 4.2136, | |
| "step": 418304 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.314280089336547e-05, | |
| "loss": 4.213, | |
| "step": 418816 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.313441494585495e-05, | |
| "loss": 4.2097, | |
| "step": 419328 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.312602899834443e-05, | |
| "loss": 4.2257, | |
| "step": 419840 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.311764305083391e-05, | |
| "loss": 4.1977, | |
| "step": 420352 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.310925710332339e-05, | |
| "loss": 4.2133, | |
| "step": 420864 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.310087115581287e-05, | |
| "loss": 4.2097, | |
| "step": 421376 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3092501587106085e-05, | |
| "loss": 4.1898, | |
| "step": 421888 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3084115639595565e-05, | |
| "loss": 4.2147, | |
| "step": 422400 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3075729692085045e-05, | |
| "loss": 4.212, | |
| "step": 422912 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3067343744574525e-05, | |
| "loss": 4.2096, | |
| "step": 423424 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3058957797064005e-05, | |
| "loss": 4.2062, | |
| "step": 423936 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3050571849553485e-05, | |
| "loss": 4.1941, | |
| "step": 424448 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3042202280846694e-05, | |
| "loss": 4.2, | |
| "step": 424960 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3033816333336174e-05, | |
| "loss": 4.2057, | |
| "step": 425472 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.302544676462938e-05, | |
| "loss": 4.2132, | |
| "step": 425984 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.301706081711886e-05, | |
| "loss": 4.2, | |
| "step": 426496 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.300867486960834e-05, | |
| "loss": 4.2112, | |
| "step": 427008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.300028892209782e-05, | |
| "loss": 4.1988, | |
| "step": 427520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.29919029745873e-05, | |
| "loss": 4.2048, | |
| "step": 428032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.298351702707678e-05, | |
| "loss": 4.1974, | |
| "step": 428544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.297513107956627e-05, | |
| "loss": 4.2007, | |
| "step": 429056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.296676151085948e-05, | |
| "loss": 4.2156, | |
| "step": 429568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.295837556334896e-05, | |
| "loss": 4.2127, | |
| "step": 430080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.294998961583844e-05, | |
| "loss": 4.2156, | |
| "step": 430592 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.294160366832792e-05, | |
| "loss": 4.1881, | |
| "step": 431104 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.29332177208174e-05, | |
| "loss": 4.2026, | |
| "step": 431616 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.292483177330688e-05, | |
| "loss": 4.1981, | |
| "step": 432128 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.291644582579636e-05, | |
| "loss": 4.2083, | |
| "step": 432640 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.290805987828583e-05, | |
| "loss": 4.1981, | |
| "step": 433152 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.289967393077531e-05, | |
| "loss": 4.1996, | |
| "step": 433664 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.289128798326479e-05, | |
| "loss": 4.1911, | |
| "step": 434176 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.288290203575427e-05, | |
| "loss": 4.2079, | |
| "step": 434688 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.287451608824375e-05, | |
| "loss": 4.1833, | |
| "step": 435200 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.286614651953697e-05, | |
| "loss": 4.2001, | |
| "step": 435712 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.285776057202645e-05, | |
| "loss": 4.1848, | |
| "step": 436224 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.284937462451593e-05, | |
| "loss": 4.2044, | |
| "step": 436736 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.284098867700541e-05, | |
| "loss": 4.1956, | |
| "step": 437248 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.283260272949489e-05, | |
| "loss": 4.1979, | |
| "step": 437760 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2824233160788096e-05, | |
| "loss": 4.1962, | |
| "step": 438272 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2815847213277576e-05, | |
| "loss": 4.1962, | |
| "step": 438784 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2807461265767056e-05, | |
| "loss": 4.1998, | |
| "step": 439296 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2799075318256536e-05, | |
| "loss": 4.1923, | |
| "step": 439808 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2790689370746016e-05, | |
| "loss": 4.1982, | |
| "step": 440320 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2782303423235496e-05, | |
| "loss": 4.213, | |
| "step": 440832 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2773917475724976e-05, | |
| "loss": 4.1994, | |
| "step": 441344 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.276554790701819e-05, | |
| "loss": 4.191, | |
| "step": 441856 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.275716195950767e-05, | |
| "loss": 4.1968, | |
| "step": 442368 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.274877601199715e-05, | |
| "loss": 4.1958, | |
| "step": 442880 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.274039006448663e-05, | |
| "loss": 4.189, | |
| "step": 443392 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.273200411697611e-05, | |
| "loss": 4.1989, | |
| "step": 443904 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.272361816946559e-05, | |
| "loss": 4.1855, | |
| "step": 444416 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.271523222195507e-05, | |
| "loss": 4.203, | |
| "step": 444928 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2706846274444545e-05, | |
| "loss": 4.2019, | |
| "step": 445440 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.269847670573776e-05, | |
| "loss": 4.192, | |
| "step": 445952 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.269009075822724e-05, | |
| "loss": 4.1841, | |
| "step": 446464 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.268172118952045e-05, | |
| "loss": 4.1908, | |
| "step": 446976 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.267333524200993e-05, | |
| "loss": 4.1903, | |
| "step": 447488 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.266494929449941e-05, | |
| "loss": 4.1986, | |
| "step": 448000 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.265656334698889e-05, | |
| "loss": 4.1957, | |
| "step": 448512 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.264817739947837e-05, | |
| "loss": 4.1983, | |
| "step": 449024 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.263979145196785e-05, | |
| "loss": 4.1767, | |
| "step": 449536 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2631421883261065e-05, | |
| "loss": 4.1984, | |
| "step": 450048 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2623035935750545e-05, | |
| "loss": 4.1912, | |
| "step": 450560 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.261464998824002e-05, | |
| "loss": 4.2025, | |
| "step": 451072 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.26062640407295e-05, | |
| "loss": 4.1813, | |
| "step": 451584 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.259787809321898e-05, | |
| "loss": 4.2015, | |
| "step": 452096 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.258949214570846e-05, | |
| "loss": 4.1912, | |
| "step": 452608 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.258110619819794e-05, | |
| "loss": 4.1988, | |
| "step": 453120 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.257273662949115e-05, | |
| "loss": 4.1854, | |
| "step": 453632 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.256435068198063e-05, | |
| "loss": 4.2003, | |
| "step": 454144 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.255596473447011e-05, | |
| "loss": 4.1774, | |
| "step": 454656 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2547578786959594e-05, | |
| "loss": 4.1944, | |
| "step": 455168 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.25392092182528e-05, | |
| "loss": 4.1918, | |
| "step": 455680 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.253082327074228e-05, | |
| "loss": 4.1856, | |
| "step": 456192 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.252243732323176e-05, | |
| "loss": 4.1891, | |
| "step": 456704 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.251405137572124e-05, | |
| "loss": 4.1859, | |
| "step": 457216 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.250568180701445e-05, | |
| "loss": 4.192, | |
| "step": 457728 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 4.185070514678955, | |
| "eval_runtime": 304.5595, | |
| "eval_samples_per_second": 1252.928, | |
| "eval_steps_per_second": 39.155, | |
| "step": 457920 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.249729585950393e-05, | |
| "loss": 4.1901, | |
| "step": 458240 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.248890991199341e-05, | |
| "loss": 4.1763, | |
| "step": 458752 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.248052396448289e-05, | |
| "loss": 4.1959, | |
| "step": 459264 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.247213801697237e-05, | |
| "loss": 4.1826, | |
| "step": 459776 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.246375206946185e-05, | |
| "loss": 4.1932, | |
| "step": 460288 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.245536612195133e-05, | |
| "loss": 4.1856, | |
| "step": 460800 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.244698017444081e-05, | |
| "loss": 4.1882, | |
| "step": 461312 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.243859422693029e-05, | |
| "loss": 4.1875, | |
| "step": 461824 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.243020827941978e-05, | |
| "loss": 4.1837, | |
| "step": 462336 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.242182233190926e-05, | |
| "loss": 4.1877, | |
| "step": 462848 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.241343638439873e-05, | |
| "loss": 4.1903, | |
| "step": 463360 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.240505043688821e-05, | |
| "loss": 4.1896, | |
| "step": 463872 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.239668086818143e-05, | |
| "loss": 4.1829, | |
| "step": 464384 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.238829492067091e-05, | |
| "loss": 4.1786, | |
| "step": 464896 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.237990897316038e-05, | |
| "loss": 4.1829, | |
| "step": 465408 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.237152302564986e-05, | |
| "loss": 4.1661, | |
| "step": 465920 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.236313707813934e-05, | |
| "loss": 4.1836, | |
| "step": 466432 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.235475113062882e-05, | |
| "loss": 4.1799, | |
| "step": 466944 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.23463651831183e-05, | |
| "loss": 4.1794, | |
| "step": 467456 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.233797923560778e-05, | |
| "loss": 4.1995, | |
| "step": 467968 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.232959328809726e-05, | |
| "loss": 4.1925, | |
| "step": 468480 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.232120734058675e-05, | |
| "loss": 4.1943, | |
| "step": 468992 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.231282139307623e-05, | |
| "loss": 4.1808, | |
| "step": 469504 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.230443544556571e-05, | |
| "loss": 4.1861, | |
| "step": 470016 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2296065876858916e-05, | |
| "loss": 4.1828, | |
| "step": 470528 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2287679929348396e-05, | |
| "loss": 4.1832, | |
| "step": 471040 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2279293981837876e-05, | |
| "loss": 4.18, | |
| "step": 471552 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2270908034327356e-05, | |
| "loss": 4.18, | |
| "step": 472064 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2262522086816836e-05, | |
| "loss": 4.1718, | |
| "step": 472576 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2254152518110045e-05, | |
| "loss": 4.1761, | |
| "step": 473088 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2245766570599525e-05, | |
| "loss": 4.1773, | |
| "step": 473600 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2237380623089005e-05, | |
| "loss": 4.1873, | |
| "step": 474112 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2228994675578485e-05, | |
| "loss": 4.1818, | |
| "step": 474624 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.22206251068717e-05, | |
| "loss": 4.1856, | |
| "step": 475136 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.221223915936118e-05, | |
| "loss": 4.1724, | |
| "step": 475648 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.220386959065439e-05, | |
| "loss": 4.1753, | |
| "step": 476160 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.219548364314387e-05, | |
| "loss": 4.1778, | |
| "step": 476672 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.218709769563335e-05, | |
| "loss": 4.1676, | |
| "step": 477184 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.217871174812283e-05, | |
| "loss": 4.1744, | |
| "step": 477696 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.217034217941604e-05, | |
| "loss": 4.1685, | |
| "step": 478208 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.216195623190552e-05, | |
| "loss": 4.1758, | |
| "step": 478720 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2153570284395e-05, | |
| "loss": 4.181, | |
| "step": 479232 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.214518433688448e-05, | |
| "loss": 4.1774, | |
| "step": 479744 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.213679838937396e-05, | |
| "loss": 4.1747, | |
| "step": 480256 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.212842882066717e-05, | |
| "loss": 4.1819, | |
| "step": 480768 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.212004287315665e-05, | |
| "loss": 4.1707, | |
| "step": 481280 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2111656925646134e-05, | |
| "loss": 4.1804, | |
| "step": 481792 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2103270978135614e-05, | |
| "loss": 4.1728, | |
| "step": 482304 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2094885030625094e-05, | |
| "loss": 4.1485, | |
| "step": 482816 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.208649908311457e-05, | |
| "loss": 4.1919, | |
| "step": 483328 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.207811313560405e-05, | |
| "loss": 4.1631, | |
| "step": 483840 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.206972718809353e-05, | |
| "loss": 4.1751, | |
| "step": 484352 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.206134124058301e-05, | |
| "loss": 4.1572, | |
| "step": 484864 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2052971671876216e-05, | |
| "loss": 4.1658, | |
| "step": 485376 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.204461848197316e-05, | |
| "loss": 4.1608, | |
| "step": 485888 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.203623253446264e-05, | |
| "loss": 4.1717, | |
| "step": 486400 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.202784658695212e-05, | |
| "loss": 4.1719, | |
| "step": 486912 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.20194606394416e-05, | |
| "loss": 4.1719, | |
| "step": 487424 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.201107469193109e-05, | |
| "loss": 4.1798, | |
| "step": 487936 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.200268874442057e-05, | |
| "loss": 4.1652, | |
| "step": 488448 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.199430279691004e-05, | |
| "loss": 4.1532, | |
| "step": 488960 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.198591684939952e-05, | |
| "loss": 4.179, | |
| "step": 489472 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1977530901889e-05, | |
| "loss": 4.1544, | |
| "step": 489984 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.196914495437848e-05, | |
| "loss": 4.1525, | |
| "step": 490496 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.196075900686796e-05, | |
| "loss": 4.1759, | |
| "step": 491008 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.195237305935744e-05, | |
| "loss": 4.1745, | |
| "step": 491520 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.194400349065065e-05, | |
| "loss": 4.1584, | |
| "step": 492032 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.193561754314013e-05, | |
| "loss": 4.1665, | |
| "step": 492544 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.192723159562961e-05, | |
| "loss": 4.1502, | |
| "step": 493056 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.191884564811909e-05, | |
| "loss": 4.1617, | |
| "step": 493568 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1910476079412306e-05, | |
| "loss": 4.1699, | |
| "step": 494080 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1902090131901786e-05, | |
| "loss": 4.1687, | |
| "step": 494592 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1893704184391266e-05, | |
| "loss": 4.1731, | |
| "step": 495104 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1885318236880746e-05, | |
| "loss": 4.168, | |
| "step": 495616 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1876948668173955e-05, | |
| "loss": 4.18, | |
| "step": 496128 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1868562720663435e-05, | |
| "loss": 4.1532, | |
| "step": 496640 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1860176773152915e-05, | |
| "loss": 4.1721, | |
| "step": 497152 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1851807204446124e-05, | |
| "loss": 4.1615, | |
| "step": 497664 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1843421256935604e-05, | |
| "loss": 4.1522, | |
| "step": 498176 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1835035309425084e-05, | |
| "loss": 4.1664, | |
| "step": 498688 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1826649361914564e-05, | |
| "loss": 4.1726, | |
| "step": 499200 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1818263414404044e-05, | |
| "loss": 4.1659, | |
| "step": 499712 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.180989384569726e-05, | |
| "loss": 4.1652, | |
| "step": 500224 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.180150789818674e-05, | |
| "loss": 4.1486, | |
| "step": 500736 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.179312195067622e-05, | |
| "loss": 4.1524, | |
| "step": 501248 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.17847360031657e-05, | |
| "loss": 4.1664, | |
| "step": 501760 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.177635005565518e-05, | |
| "loss": 4.1692, | |
| "step": 502272 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.176796410814466e-05, | |
| "loss": 4.1575, | |
| "step": 502784 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.175957816063414e-05, | |
| "loss": 4.1679, | |
| "step": 503296 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.175119221312362e-05, | |
| "loss": 4.1506, | |
| "step": 503808 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.174282264441683e-05, | |
| "loss": 4.1635, | |
| "step": 504320 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.173445307571004e-05, | |
| "loss": 4.1557, | |
| "step": 504832 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.172606712819952e-05, | |
| "loss": 4.1589, | |
| "step": 505344 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1717681180689e-05, | |
| "loss": 4.1721, | |
| "step": 505856 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.170929523317848e-05, | |
| "loss": 4.1715, | |
| "step": 506368 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1700909285667964e-05, | |
| "loss": 4.1737, | |
| "step": 506880 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1692523338157444e-05, | |
| "loss": 4.1456, | |
| "step": 507392 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.168415376945065e-05, | |
| "loss": 4.1609, | |
| "step": 507904 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.167576782194013e-05, | |
| "loss": 4.1541, | |
| "step": 508416 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.166738187442961e-05, | |
| "loss": 4.1645, | |
| "step": 508928 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.165901230572282e-05, | |
| "loss": 4.1575, | |
| "step": 509440 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.16506263582123e-05, | |
| "loss": 4.1593, | |
| "step": 509952 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.164224041070178e-05, | |
| "loss": 4.1507, | |
| "step": 510464 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.163385446319126e-05, | |
| "loss": 4.1609, | |
| "step": 510976 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.162546851568074e-05, | |
| "loss": 4.1445, | |
| "step": 511488 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.161708256817022e-05, | |
| "loss": 4.1571, | |
| "step": 512000 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.16086966206597e-05, | |
| "loss": 4.1421, | |
| "step": 512512 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.160031067314918e-05, | |
| "loss": 4.1628, | |
| "step": 513024 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.159192472563866e-05, | |
| "loss": 4.1535, | |
| "step": 513536 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.158353877812814e-05, | |
| "loss": 4.1557, | |
| "step": 514048 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.157515283061762e-05, | |
| "loss": 4.1518, | |
| "step": 514560 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.15667668831071e-05, | |
| "loss": 4.1572, | |
| "step": 515072 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.155838093559658e-05, | |
| "loss": 4.1573, | |
| "step": 515584 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.155001136688979e-05, | |
| "loss": 4.1509, | |
| "step": 516096 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.154162541937927e-05, | |
| "loss": 4.1604, | |
| "step": 516608 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.153325585067248e-05, | |
| "loss": 4.1668, | |
| "step": 517120 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.152486990316196e-05, | |
| "loss": 4.1609, | |
| "step": 517632 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.151648395565144e-05, | |
| "loss": 4.152, | |
| "step": 518144 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.150809800814092e-05, | |
| "loss": 4.1505, | |
| "step": 518656 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.14997120606304e-05, | |
| "loss": 4.1588, | |
| "step": 519168 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.149132611311988e-05, | |
| "loss": 4.146, | |
| "step": 519680 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1482940165609366e-05, | |
| "loss": 4.1592, | |
| "step": 520192 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1474554218098846e-05, | |
| "loss": 4.1438, | |
| "step": 520704 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1466184649392055e-05, | |
| "loss": 4.161, | |
| "step": 521216 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1457798701881535e-05, | |
| "loss": 4.1584, | |
| "step": 521728 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1449412754371015e-05, | |
| "loss": 4.1538, | |
| "step": 522240 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1441026806860495e-05, | |
| "loss": 4.1439, | |
| "step": 522752 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1432657238153704e-05, | |
| "loss": 4.1491, | |
| "step": 523264 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.142428766944691e-05, | |
| "loss": 4.1491, | |
| "step": 523776 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.141590172193639e-05, | |
| "loss": 4.1614, | |
| "step": 524288 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.140751577442587e-05, | |
| "loss": 4.1558, | |
| "step": 524800 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.139912982691535e-05, | |
| "loss": 4.1542, | |
| "step": 525312 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.139074387940483e-05, | |
| "loss": 4.1386, | |
| "step": 525824 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.138237431069805e-05, | |
| "loss": 4.1595, | |
| "step": 526336 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.137398836318753e-05, | |
| "loss": 4.1502, | |
| "step": 526848 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.136560241567701e-05, | |
| "loss": 4.1629, | |
| "step": 527360 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.135721646816649e-05, | |
| "loss": 4.1413, | |
| "step": 527872 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.134883052065597e-05, | |
| "loss": 4.1606, | |
| "step": 528384 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.134044457314545e-05, | |
| "loss": 4.1503, | |
| "step": 528896 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.133205862563493e-05, | |
| "loss": 4.1647, | |
| "step": 529408 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.132367267812441e-05, | |
| "loss": 4.1461, | |
| "step": 529920 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.131530310941762e-05, | |
| "loss": 4.1554, | |
| "step": 530432 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.13069171619071e-05, | |
| "loss": 4.1412, | |
| "step": 530944 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.129853121439658e-05, | |
| "loss": 4.1533, | |
| "step": 531456 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.129016164568979e-05, | |
| "loss": 4.1527, | |
| "step": 531968 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1281775698179273e-05, | |
| "loss": 4.1434, | |
| "step": 532480 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1273389750668753e-05, | |
| "loss": 4.153, | |
| "step": 532992 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.126500380315823e-05, | |
| "loss": 4.1428, | |
| "step": 533504 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1256617855647707e-05, | |
| "loss": 4.1562, | |
| "step": 534016 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 4.153366565704346, | |
| "eval_runtime": 292.5709, | |
| "eval_samples_per_second": 1304.268, | |
| "eval_steps_per_second": 40.759, | |
| "step": 534240 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1248231908137186e-05, | |
| "loss": 4.151, | |
| "step": 534528 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1239845960626666e-05, | |
| "loss": 4.1365, | |
| "step": 535040 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1231460013116146e-05, | |
| "loss": 4.1536, | |
| "step": 535552 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1223090444409356e-05, | |
| "loss": 4.1435, | |
| "step": 536064 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1214704496898835e-05, | |
| "loss": 4.1557, | |
| "step": 536576 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1206318549388315e-05, | |
| "loss": 4.1487, | |
| "step": 537088 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1197932601877795e-05, | |
| "loss": 4.1473, | |
| "step": 537600 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.118956303317101e-05, | |
| "loss": 4.1483, | |
| "step": 538112 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.118117708566049e-05, | |
| "loss": 4.1425, | |
| "step": 538624 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.117279113814997e-05, | |
| "loss": 4.1498, | |
| "step": 539136 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.116440519063945e-05, | |
| "loss": 4.151, | |
| "step": 539648 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.115601924312893e-05, | |
| "loss": 4.1513, | |
| "step": 540160 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.114764967442214e-05, | |
| "loss": 4.1474, | |
| "step": 540672 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.113926372691162e-05, | |
| "loss": 4.1381, | |
| "step": 541184 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.11308777794011e-05, | |
| "loss": 4.1399, | |
| "step": 541696 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.112249183189058e-05, | |
| "loss": 4.1312, | |
| "step": 542208 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.111410588438006e-05, | |
| "loss": 4.1422, | |
| "step": 542720 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.110571993686954e-05, | |
| "loss": 4.1461, | |
| "step": 543232 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.109733398935902e-05, | |
| "loss": 4.1368, | |
| "step": 543744 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.108896442065223e-05, | |
| "loss": 4.1586, | |
| "step": 544256 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.108057847314171e-05, | |
| "loss": 4.1539, | |
| "step": 544768 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1072192525631196e-05, | |
| "loss": 4.1564, | |
| "step": 545280 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1063806578120676e-05, | |
| "loss": 4.1443, | |
| "step": 545792 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1055420630610156e-05, | |
| "loss": 4.1441, | |
| "step": 546304 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1047034683099636e-05, | |
| "loss": 4.1518, | |
| "step": 546816 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1038648735589115e-05, | |
| "loss": 4.141, | |
| "step": 547328 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1030262788078595e-05, | |
| "loss": 4.1407, | |
| "step": 547840 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1021893219371805e-05, | |
| "loss": 4.1476, | |
| "step": 548352 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1013507271861285e-05, | |
| "loss": 4.1317, | |
| "step": 548864 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.1005121324350764e-05, | |
| "loss": 4.1362, | |
| "step": 549376 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0996751755643974e-05, | |
| "loss": 4.1407, | |
| "step": 549888 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0988365808133454e-05, | |
| "loss": 4.1453, | |
| "step": 550400 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0979979860622933e-05, | |
| "loss": 4.1464, | |
| "step": 550912 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0971593913112413e-05, | |
| "loss": 4.1457, | |
| "step": 551424 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.096320796560189e-05, | |
| "loss": 4.1336, | |
| "step": 551936 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.095482201809137e-05, | |
| "loss": 4.1361, | |
| "step": 552448 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.094643607058085e-05, | |
| "loss": 4.1438, | |
| "step": 552960 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.093805012307033e-05, | |
| "loss": 4.1282, | |
| "step": 553472 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.092968055436354e-05, | |
| "loss": 4.1376, | |
| "step": 553984 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.092129460685302e-05, | |
| "loss": 4.1285, | |
| "step": 554496 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.091292503814624e-05, | |
| "loss": 4.1415, | |
| "step": 555008 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.090453909063572e-05, | |
| "loss": 4.1374, | |
| "step": 555520 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.089616952192893e-05, | |
| "loss": 4.1405, | |
| "step": 556032 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.088778357441841e-05, | |
| "loss": 4.1397, | |
| "step": 556544 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.087939762690789e-05, | |
| "loss": 4.141, | |
| "step": 557056 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.087101167939737e-05, | |
| "loss": 4.1351, | |
| "step": 557568 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.086262573188685e-05, | |
| "loss": 4.1427, | |
| "step": 558080 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.085423978437633e-05, | |
| "loss": 4.1331, | |
| "step": 558592 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.084585383686581e-05, | |
| "loss": 4.114, | |
| "step": 559104 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.083746788935529e-05, | |
| "loss": 4.1514, | |
| "step": 559616 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.082908194184477e-05, | |
| "loss": 4.1266, | |
| "step": 560128 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.082069599433425e-05, | |
| "loss": 4.1372, | |
| "step": 560640 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.081231004682373e-05, | |
| "loss": 4.1257, | |
| "step": 561152 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.080392409931321e-05, | |
| "loss": 4.1262, | |
| "step": 561664 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0795554530606416e-05, | |
| "loss": 4.1219, | |
| "step": 562176 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0787168583095896e-05, | |
| "loss": 4.1332, | |
| "step": 562688 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0778782635585376e-05, | |
| "loss": 4.1382, | |
| "step": 563200 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0770396688074856e-05, | |
| "loss": 4.131, | |
| "step": 563712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0762010740564336e-05, | |
| "loss": 4.1452, | |
| "step": 564224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.075365755066128e-05, | |
| "loss": 4.1345, | |
| "step": 564736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.074527160315076e-05, | |
| "loss": 4.114, | |
| "step": 565248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.073688565564024e-05, | |
| "loss": 4.1383, | |
| "step": 565760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.072849970812972e-05, | |
| "loss": 4.1205, | |
| "step": 566272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.07201137606192e-05, | |
| "loss": 4.1128, | |
| "step": 566784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.071172781310868e-05, | |
| "loss": 4.1434, | |
| "step": 567296 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.070334186559816e-05, | |
| "loss": 4.1361, | |
| "step": 567808 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.069495591808764e-05, | |
| "loss": 4.1232, | |
| "step": 568320 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.068658634938085e-05, | |
| "loss": 4.1317, | |
| "step": 568832 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.067820040187033e-05, | |
| "loss": 4.1159, | |
| "step": 569344 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.066981445435981e-05, | |
| "loss": 4.1251, | |
| "step": 569856 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.066142850684929e-05, | |
| "loss": 4.1314, | |
| "step": 570368 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0653058938142505e-05, | |
| "loss": 4.1338, | |
| "step": 570880 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0644672990631985e-05, | |
| "loss": 4.1357, | |
| "step": 571392 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0636303421925194e-05, | |
| "loss": 4.1303, | |
| "step": 571904 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0627917474414674e-05, | |
| "loss": 4.1457, | |
| "step": 572416 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0619531526904154e-05, | |
| "loss": 4.1165, | |
| "step": 572928 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0611145579393634e-05, | |
| "loss": 4.1381, | |
| "step": 573440 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0602759631883114e-05, | |
| "loss": 4.1235, | |
| "step": 573952 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0594373684372594e-05, | |
| "loss": 4.1216, | |
| "step": 574464 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0585987736862074e-05, | |
| "loss": 4.1312, | |
| "step": 574976 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.0577601789351554e-05, | |
| "loss": 4.1357, | |
| "step": 575488 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.056921584184103e-05, | |
| "loss": 4.1311, | |
| "step": 576000 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.056084627313424e-05, | |
| "loss": 4.1353, | |
| "step": 576512 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.055247670442746e-05, | |
| "loss": 4.1121, | |
| "step": 577024 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.054409075691694e-05, | |
| "loss": 4.1175, | |
| "step": 577536 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.053570480940642e-05, | |
| "loss": 4.1298, | |
| "step": 578048 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.05273188618959e-05, | |
| "loss": 4.1359, | |
| "step": 578560 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.051893291438538e-05, | |
| "loss": 4.1248, | |
| "step": 579072 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.051057972448232e-05, | |
| "loss": 4.1311, | |
| "step": 579584 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.05021937769718e-05, | |
| "loss": 4.1156, | |
| "step": 580096 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.049380782946128e-05, | |
| "loss": 4.1296, | |
| "step": 580608 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.048542188195076e-05, | |
| "loss": 4.1213, | |
| "step": 581120 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.047703593444024e-05, | |
| "loss": 4.1225, | |
| "step": 581632 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0468666365733446e-05, | |
| "loss": 4.1362, | |
| "step": 582144 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0460280418222926e-05, | |
| "loss": 4.1362, | |
| "step": 582656 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.045189447071241e-05, | |
| "loss": 4.1406, | |
| "step": 583168 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.044350852320189e-05, | |
| "loss": 4.1155, | |
| "step": 583680 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.043512257569137e-05, | |
| "loss": 4.1235, | |
| "step": 584192 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.042673662818085e-05, | |
| "loss": 4.121, | |
| "step": 584704 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0418350680670326e-05, | |
| "loss": 4.1273, | |
| "step": 585216 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0409964733159806e-05, | |
| "loss": 4.122, | |
| "step": 585728 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0401578785649285e-05, | |
| "loss": 4.1281, | |
| "step": 586240 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0393209216942495e-05, | |
| "loss": 4.118, | |
| "step": 586752 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0384823269431975e-05, | |
| "loss": 4.1219, | |
| "step": 587264 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0376437321921454e-05, | |
| "loss": 4.118, | |
| "step": 587776 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0368051374410934e-05, | |
| "loss": 4.1218, | |
| "step": 588288 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0359665426900414e-05, | |
| "loss": 4.1083, | |
| "step": 588800 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0351279479389894e-05, | |
| "loss": 4.1285, | |
| "step": 589312 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0342893531879374e-05, | |
| "loss": 4.1208, | |
| "step": 589824 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.033450758436886e-05, | |
| "loss": 4.1236, | |
| "step": 590336 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.032613801566207e-05, | |
| "loss": 4.1168, | |
| "step": 590848 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.031775206815155e-05, | |
| "loss": 4.1234, | |
| "step": 591360 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.030936612064103e-05, | |
| "loss": 4.1251, | |
| "step": 591872 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.030098017313051e-05, | |
| "loss": 4.1172, | |
| "step": 592384 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.029259422561999e-05, | |
| "loss": 4.1292, | |
| "step": 592896 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.028424103571693e-05, | |
| "loss": 4.1309, | |
| "step": 593408 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.027585508820641e-05, | |
| "loss": 4.1214, | |
| "step": 593920 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.026746914069589e-05, | |
| "loss": 4.1263, | |
| "step": 594432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.025908319318537e-05, | |
| "loss": 4.1162, | |
| "step": 594944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.025069724567485e-05, | |
| "loss": 4.1201, | |
| "step": 595456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.024231129816433e-05, | |
| "loss": 4.121, | |
| "step": 595968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0233925350653815e-05, | |
| "loss": 4.1244, | |
| "step": 596480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0225539403143295e-05, | |
| "loss": 4.1064, | |
| "step": 596992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0217153455632775e-05, | |
| "loss": 4.1274, | |
| "step": 597504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0208783886925984e-05, | |
| "loss": 4.1266, | |
| "step": 598016 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0200397939415464e-05, | |
| "loss": 4.1226, | |
| "step": 598528 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0192011991904944e-05, | |
| "loss": 4.1106, | |
| "step": 599040 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0183626044394424e-05, | |
| "loss": 4.1158, | |
| "step": 599552 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.017527285449136e-05, | |
| "loss": 4.1156, | |
| "step": 600064 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.016688690698084e-05, | |
| "loss": 4.1271, | |
| "step": 600576 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.015850095947032e-05, | |
| "loss": 4.1222, | |
| "step": 601088 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.015013139076354e-05, | |
| "loss": 4.1243, | |
| "step": 601600 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.014174544325302e-05, | |
| "loss": 4.1038, | |
| "step": 602112 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.01333594957425e-05, | |
| "loss": 4.1253, | |
| "step": 602624 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.012497354823198e-05, | |
| "loss": 4.119, | |
| "step": 603136 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.011658760072146e-05, | |
| "loss": 4.1292, | |
| "step": 603648 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.010820165321094e-05, | |
| "loss": 4.1125, | |
| "step": 604160 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.009981570570042e-05, | |
| "loss": 4.122, | |
| "step": 604672 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.00914297581899e-05, | |
| "loss": 4.1178, | |
| "step": 605184 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.008304381067938e-05, | |
| "loss": 4.1335, | |
| "step": 605696 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.007465786316886e-05, | |
| "loss": 4.114, | |
| "step": 606208 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.006627191565833e-05, | |
| "loss": 4.1216, | |
| "step": 606720 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.005788596814781e-05, | |
| "loss": 4.1156, | |
| "step": 607232 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0049532778244755e-05, | |
| "loss": 4.1214, | |
| "step": 607744 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.0041146830734235e-05, | |
| "loss": 4.1145, | |
| "step": 608256 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.003276088322372e-05, | |
| "loss": 4.1113, | |
| "step": 608768 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.00243749357132e-05, | |
| "loss": 4.1214, | |
| "step": 609280 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.001598898820268e-05, | |
| "loss": 4.109, | |
| "step": 609792 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.000760304069216e-05, | |
| "loss": 4.1266, | |
| "step": 610304 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 4.129226207733154, | |
| "eval_runtime": 292.3983, | |
| "eval_samples_per_second": 1305.038, | |
| "eval_steps_per_second": 40.783, | |
| "step": 610560 | |
| } | |
| ], | |
| "logging_steps": 512, | |
| "max_steps": 3052726, | |
| "num_train_epochs": 9223372036854775807, | |
| "save_steps": 10, | |
| "total_flos": 2.5087752047469027e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
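
The trailing fields above record the run metadata captured in the trainer state (`logging_steps`, `max_steps`, `save_steps`, `total_flos`, and the best-checkpoint pointer). As a minimal sketch of how this file can be consumed, assuming it is the standard Hugging Face `trainer_state.json` saved alongside a checkpoint (the file path below is hypothetical), the snippet loads the state and separates the periodic `eval_loss` records from the per-512-step training-loss entries in `log_history`:

```python
import json

# Hypothetical path: point this at the trainer_state.json of any checkpoint.
with open("trainer_state.json") as f:
    state = json.load(f)

print("best eval loss :", state["best_metric"])
print("best checkpoint:", state["best_model_checkpoint"])

# Entries carrying "eval_loss" are the evaluation records; the remaining
# entries are the training-loss logs emitted every logging_steps (512) steps.
evals = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]
for step, loss in evals:
    print(f"step {step:>8}: eval_loss = {loss:.4f}")
```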