| { | |
| "best_metric": 3.9272706508636475, | |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/re-irr-sv-agr/transformer/4/checkpoints/checkpoint-305280", | |
| "epoch": 1.0250006060157382, | |
| "eval_steps": 10, | |
| "global_step": 305280, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.999998362119627e-05, | |
| "loss": 11.009, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.999161405248948e-05, | |
| "loss": 6.8095, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.998322810497896e-05, | |
| "loss": 6.1651, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.997484215746844e-05, | |
| "loss": 5.9594, | |
| "step": 1536 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.996645620995792e-05, | |
| "loss": 5.7982, | |
| "step": 2048 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.99580702624474e-05, | |
| "loss": 5.694, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.994968431493688e-05, | |
| "loss": 5.5853, | |
| "step": 3072 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.994129836742636e-05, | |
| "loss": 5.5267, | |
| "step": 3584 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.993291241991584e-05, | |
| "loss": 5.4291, | |
| "step": 4096 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.992452647240532e-05, | |
| "loss": 5.3859, | |
| "step": 4608 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.99161405248948e-05, | |
| "loss": 5.3256, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.990775457738428e-05, | |
| "loss": 5.3, | |
| "step": 5632 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.989936862987376e-05, | |
| "loss": 5.2522, | |
| "step": 6144 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.989099906116697e-05, | |
| "loss": 5.1968, | |
| "step": 6656 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.988261311365645e-05, | |
| "loss": 5.1645, | |
| "step": 7168 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.987422716614593e-05, | |
| "loss": 5.1134, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.986584121863541e-05, | |
| "loss": 5.089, | |
| "step": 8192 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.985745527112489e-05, | |
| "loss": 5.0669, | |
| "step": 8704 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.984906932361437e-05, | |
| "loss": 5.0378, | |
| "step": 9216 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.984068337610385e-05, | |
| "loss": 5.0085, | |
| "step": 9728 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.983229742859333e-05, | |
| "loss": 4.9983, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9823927859886547e-05, | |
| "loss": 4.9686, | |
| "step": 10752 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9815541912376026e-05, | |
| "loss": 4.9489, | |
| "step": 11264 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9807155964865506e-05, | |
| "loss": 4.9122, | |
| "step": 11776 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9798770017354986e-05, | |
| "loss": 4.9128, | |
| "step": 12288 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9790400448648195e-05, | |
| "loss": 4.8816, | |
| "step": 12800 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9782014501137675e-05, | |
| "loss": 4.8695, | |
| "step": 13312 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9773628553627155e-05, | |
| "loss": 4.8503, | |
| "step": 13824 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9765242606116635e-05, | |
| "loss": 4.8233, | |
| "step": 14336 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9756873037409844e-05, | |
| "loss": 4.8105, | |
| "step": 14848 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9748487089899324e-05, | |
| "loss": 4.7914, | |
| "step": 15360 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9740101142388804e-05, | |
| "loss": 4.7841, | |
| "step": 15872 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9731715194878284e-05, | |
| "loss": 4.7687, | |
| "step": 16384 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.97233456261715e-05, | |
| "loss": 4.7612, | |
| "step": 16896 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.971495967866098e-05, | |
| "loss": 4.7491, | |
| "step": 17408 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.970657373115046e-05, | |
| "loss": 4.7362, | |
| "step": 17920 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.969820416244367e-05, | |
| "loss": 4.7235, | |
| "step": 18432 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.968981821493315e-05, | |
| "loss": 4.6883, | |
| "step": 18944 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.968143226742263e-05, | |
| "loss": 4.6873, | |
| "step": 19456 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.967304631991211e-05, | |
| "loss": 4.6762, | |
| "step": 19968 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.966466037240159e-05, | |
| "loss": 4.6602, | |
| "step": 20480 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.965627442489107e-05, | |
| "loss": 4.6636, | |
| "step": 20992 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.964788847738054e-05, | |
| "loss": 4.6576, | |
| "step": 21504 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.963950252987002e-05, | |
| "loss": 4.6361, | |
| "step": 22016 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.96311165823595e-05, | |
| "loss": 4.6361, | |
| "step": 22528 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.962274701365272e-05, | |
| "loss": 4.6288, | |
| "step": 23040 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.96143610661422e-05, | |
| "loss": 4.6205, | |
| "step": 23552 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.960597511863168e-05, | |
| "loss": 4.6164, | |
| "step": 24064 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.959758917112116e-05, | |
| "loss": 4.5972, | |
| "step": 24576 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9589219602414374e-05, | |
| "loss": 4.5849, | |
| "step": 25088 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.958083365490385e-05, | |
| "loss": 4.5868, | |
| "step": 25600 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.957244770739333e-05, | |
| "loss": 4.5729, | |
| "step": 26112 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.956406175988281e-05, | |
| "loss": 4.5727, | |
| "step": 26624 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.955567581237229e-05, | |
| "loss": 4.5461, | |
| "step": 27136 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.954732262246923e-05, | |
| "loss": 4.5598, | |
| "step": 27648 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.953893667495871e-05, | |
| "loss": 4.5338, | |
| "step": 28160 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.953055072744819e-05, | |
| "loss": 4.5539, | |
| "step": 28672 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.952216477993767e-05, | |
| "loss": 4.5278, | |
| "step": 29184 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.951377883242715e-05, | |
| "loss": 4.5255, | |
| "step": 29696 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.950539288491663e-05, | |
| "loss": 4.518, | |
| "step": 30208 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.949700693740611e-05, | |
| "loss": 4.513, | |
| "step": 30720 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.948862098989559e-05, | |
| "loss": 4.4991, | |
| "step": 31232 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.94802514211888e-05, | |
| "loss": 4.5042, | |
| "step": 31744 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.947186547367828e-05, | |
| "loss": 4.4797, | |
| "step": 32256 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.946347952616776e-05, | |
| "loss": 4.4747, | |
| "step": 32768 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.945510995746097e-05, | |
| "loss": 4.4883, | |
| "step": 33280 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.944672400995045e-05, | |
| "loss": 4.48, | |
| "step": 33792 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.943833806243993e-05, | |
| "loss": 4.4671, | |
| "step": 34304 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.942995211492941e-05, | |
| "loss": 4.4529, | |
| "step": 34816 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.942156616741889e-05, | |
| "loss": 4.4541, | |
| "step": 35328 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.941318021990837e-05, | |
| "loss": 4.4609, | |
| "step": 35840 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9404794272397856e-05, | |
| "loss": 4.4558, | |
| "step": 36352 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9396424703691065e-05, | |
| "loss": 4.4483, | |
| "step": 36864 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9388038756180545e-05, | |
| "loss": 4.4451, | |
| "step": 37376 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9379652808670025e-05, | |
| "loss": 4.4465, | |
| "step": 37888 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9371266861159505e-05, | |
| "loss": 4.4403, | |
| "step": 38400 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9362897292452714e-05, | |
| "loss": 4.4182, | |
| "step": 38912 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9354511344942194e-05, | |
| "loss": 4.42, | |
| "step": 39424 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9346125397431674e-05, | |
| "loss": 4.4199, | |
| "step": 39936 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9337739449921154e-05, | |
| "loss": 4.4095, | |
| "step": 40448 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9329353502410634e-05, | |
| "loss": 4.4097, | |
| "step": 40960 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9320967554900114e-05, | |
| "loss": 4.4036, | |
| "step": 41472 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9312581607389594e-05, | |
| "loss": 4.4193, | |
| "step": 41984 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9304195659879074e-05, | |
| "loss": 4.3935, | |
| "step": 42496 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.929582609117229e-05, | |
| "loss": 4.3795, | |
| "step": 43008 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.928744014366177e-05, | |
| "loss": 4.3901, | |
| "step": 43520 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.927905419615125e-05, | |
| "loss": 4.3872, | |
| "step": 44032 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.927066824864073e-05, | |
| "loss": 4.3929, | |
| "step": 44544 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.926229867993394e-05, | |
| "loss": 4.3719, | |
| "step": 45056 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.925392911122715e-05, | |
| "loss": 4.3744, | |
| "step": 45568 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.924554316371663e-05, | |
| "loss": 4.3727, | |
| "step": 46080 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.923715721620611e-05, | |
| "loss": 4.3668, | |
| "step": 46592 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.922877126869559e-05, | |
| "loss": 4.3566, | |
| "step": 47104 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.922038532118507e-05, | |
| "loss": 4.3593, | |
| "step": 47616 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.921199937367455e-05, | |
| "loss": 4.3604, | |
| "step": 48128 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.920362980496776e-05, | |
| "loss": 4.3678, | |
| "step": 48640 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.919524385745724e-05, | |
| "loss": 4.3472, | |
| "step": 49152 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.918685790994672e-05, | |
| "loss": 4.3402, | |
| "step": 49664 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.91784719624362e-05, | |
| "loss": 4.3392, | |
| "step": 50176 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9170086014925676e-05, | |
| "loss": 4.3386, | |
| "step": 50688 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9161700067415156e-05, | |
| "loss": 4.3461, | |
| "step": 51200 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9153314119904636e-05, | |
| "loss": 4.3322, | |
| "step": 51712 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9144928172394116e-05, | |
| "loss": 4.3259, | |
| "step": 52224 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9136558603687325e-05, | |
| "loss": 4.3194, | |
| "step": 52736 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9128172656176805e-05, | |
| "loss": 4.3281, | |
| "step": 53248 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9119786708666285e-05, | |
| "loss": 4.2985, | |
| "step": 53760 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9111400761155765e-05, | |
| "loss": 4.3173, | |
| "step": 54272 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.910303119244898e-05, | |
| "loss": 4.3088, | |
| "step": 54784 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.909464524493846e-05, | |
| "loss": 4.315, | |
| "step": 55296 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.908625929742794e-05, | |
| "loss": 4.3032, | |
| "step": 55808 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.907787334991742e-05, | |
| "loss": 4.3084, | |
| "step": 56320 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.90694874024069e-05, | |
| "loss": 4.2974, | |
| "step": 56832 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.906110145489638e-05, | |
| "loss": 4.2982, | |
| "step": 57344 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.905273188618959e-05, | |
| "loss": 4.3028, | |
| "step": 57856 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.904434593867907e-05, | |
| "loss": 4.2857, | |
| "step": 58368 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.903595999116855e-05, | |
| "loss": 4.3022, | |
| "step": 58880 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.902757404365803e-05, | |
| "loss": 4.2988, | |
| "step": 59392 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.901920447495124e-05, | |
| "loss": 4.2908, | |
| "step": 59904 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.901081852744072e-05, | |
| "loss": 4.2812, | |
| "step": 60416 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.90024325799302e-05, | |
| "loss": 4.2801, | |
| "step": 60928 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8994046632419686e-05, | |
| "loss": 4.2832, | |
| "step": 61440 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8985660684909166e-05, | |
| "loss": 4.2801, | |
| "step": 61952 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8977274737398646e-05, | |
| "loss": 4.2792, | |
| "step": 62464 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8968888789888125e-05, | |
| "loss": 4.268, | |
| "step": 62976 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8960502842377605e-05, | |
| "loss": 4.2754, | |
| "step": 63488 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8952133273670815e-05, | |
| "loss": 4.2601, | |
| "step": 64000 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8943747326160294e-05, | |
| "loss": 4.2638, | |
| "step": 64512 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8935361378649774e-05, | |
| "loss": 4.2515, | |
| "step": 65024 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8926991809942984e-05, | |
| "loss": 4.2554, | |
| "step": 65536 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8918605862432463e-05, | |
| "loss": 4.2437, | |
| "step": 66048 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8910219914921943e-05, | |
| "loss": 4.2666, | |
| "step": 66560 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8901833967411423e-05, | |
| "loss": 4.2705, | |
| "step": 67072 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.88934480199009e-05, | |
| "loss": 4.2571, | |
| "step": 67584 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.888506207239038e-05, | |
| "loss": 4.2336, | |
| "step": 68096 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.887667612487986e-05, | |
| "loss": 4.2548, | |
| "step": 68608 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.886829017736934e-05, | |
| "loss": 4.2475, | |
| "step": 69120 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.885992060866256e-05, | |
| "loss": 4.2457, | |
| "step": 69632 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.885155103995577e-05, | |
| "loss": 4.2374, | |
| "step": 70144 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.884316509244525e-05, | |
| "loss": 4.2459, | |
| "step": 70656 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.883477914493473e-05, | |
| "loss": 4.2392, | |
| "step": 71168 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.882639319742421e-05, | |
| "loss": 4.2337, | |
| "step": 71680 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.881800724991369e-05, | |
| "loss": 4.2348, | |
| "step": 72192 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.880962130240316e-05, | |
| "loss": 4.2369, | |
| "step": 72704 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.880123535489264e-05, | |
| "loss": 4.223, | |
| "step": 73216 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.879286578618586e-05, | |
| "loss": 4.225, | |
| "step": 73728 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.878447983867534e-05, | |
| "loss": 4.237, | |
| "step": 74240 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.877609389116482e-05, | |
| "loss": 4.2213, | |
| "step": 74752 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.87677079436543e-05, | |
| "loss": 4.2224, | |
| "step": 75264 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.875932199614378e-05, | |
| "loss": 4.2328, | |
| "step": 75776 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.875093604863326e-05, | |
| "loss": 4.2133, | |
| "step": 76288 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 4.207637310028076, | |
| "eval_runtime": 302.2209, | |
| "eval_samples_per_second": 1262.623, | |
| "eval_steps_per_second": 39.458, | |
| "step": 76320 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.874255010112274e-05, | |
| "loss": 4.1968, | |
| "step": 76800 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8734180532415946e-05, | |
| "loss": 4.2045, | |
| "step": 77312 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8725794584905426e-05, | |
| "loss": 4.2192, | |
| "step": 77824 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8717408637394906e-05, | |
| "loss": 4.2089, | |
| "step": 78336 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8709022689884386e-05, | |
| "loss": 4.2145, | |
| "step": 78848 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8700636742373866e-05, | |
| "loss": 4.192, | |
| "step": 79360 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8692250794863346e-05, | |
| "loss": 4.2047, | |
| "step": 79872 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8683864847352826e-05, | |
| "loss": 4.1838, | |
| "step": 80384 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.867549527864604e-05, | |
| "loss": 4.2006, | |
| "step": 80896 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.866710933113552e-05, | |
| "loss": 4.1845, | |
| "step": 81408 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8658723383625e-05, | |
| "loss": 4.2007, | |
| "step": 81920 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.865033743611448e-05, | |
| "loss": 4.1956, | |
| "step": 82432 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.864196786740769e-05, | |
| "loss": 4.1857, | |
| "step": 82944 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.863358191989717e-05, | |
| "loss": 4.1773, | |
| "step": 83456 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.862519597238665e-05, | |
| "loss": 4.1703, | |
| "step": 83968 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.861681002487613e-05, | |
| "loss": 4.1704, | |
| "step": 84480 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.860842407736561e-05, | |
| "loss": 4.1794, | |
| "step": 84992 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.860003812985509e-05, | |
| "loss": 4.177, | |
| "step": 85504 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.859165218234457e-05, | |
| "loss": 4.169, | |
| "step": 86016 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.858326623483405e-05, | |
| "loss": 4.1887, | |
| "step": 86528 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.857488028732352e-05, | |
| "loss": 4.1814, | |
| "step": 87040 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.856649433981301e-05, | |
| "loss": 4.1729, | |
| "step": 87552 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.855810839230249e-05, | |
| "loss": 4.164, | |
| "step": 88064 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.85497388235957e-05, | |
| "loss": 4.1831, | |
| "step": 88576 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8541369254888915e-05, | |
| "loss": 4.1552, | |
| "step": 89088 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8532983307378395e-05, | |
| "loss": 4.1677, | |
| "step": 89600 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8524597359867875e-05, | |
| "loss": 4.1657, | |
| "step": 90112 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.851621141235735e-05, | |
| "loss": 4.1584, | |
| "step": 90624 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.850782546484683e-05, | |
| "loss": 4.1492, | |
| "step": 91136 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.849943951733631e-05, | |
| "loss": 4.1501, | |
| "step": 91648 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8491069948629524e-05, | |
| "loss": 4.1548, | |
| "step": 92160 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8482684001119e-05, | |
| "loss": 4.1533, | |
| "step": 92672 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.847429805360848e-05, | |
| "loss": 4.1539, | |
| "step": 93184 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8465912106097964e-05, | |
| "loss": 4.1527, | |
| "step": 93696 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8457526158587444e-05, | |
| "loss": 4.1513, | |
| "step": 94208 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8449140211076924e-05, | |
| "loss": 4.1481, | |
| "step": 94720 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8440754263566404e-05, | |
| "loss": 4.1318, | |
| "step": 95232 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.843238469485961e-05, | |
| "loss": 4.137, | |
| "step": 95744 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.842399874734909e-05, | |
| "loss": 4.1327, | |
| "step": 96256 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.841561279983857e-05, | |
| "loss": 4.1284, | |
| "step": 96768 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.840722685232805e-05, | |
| "loss": 4.1384, | |
| "step": 97280 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.839885728362126e-05, | |
| "loss": 4.1396, | |
| "step": 97792 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.839047133611074e-05, | |
| "loss": 4.1273, | |
| "step": 98304 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.838208538860022e-05, | |
| "loss": 4.1265, | |
| "step": 98816 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.83736994410897e-05, | |
| "loss": 4.1296, | |
| "step": 99328 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.836531349357918e-05, | |
| "loss": 4.1312, | |
| "step": 99840 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.835692754606866e-05, | |
| "loss": 4.1323, | |
| "step": 100352 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.834854159855814e-05, | |
| "loss": 4.1226, | |
| "step": 100864 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8340188408655086e-05, | |
| "loss": 4.1204, | |
| "step": 101376 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8331802461144566e-05, | |
| "loss": 4.1243, | |
| "step": 101888 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8323416513634046e-05, | |
| "loss": 4.1165, | |
| "step": 102400 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8315030566123526e-05, | |
| "loss": 4.1194, | |
| "step": 102912 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8306644618613006e-05, | |
| "loss": 4.1097, | |
| "step": 103424 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8298275049906215e-05, | |
| "loss": 4.1225, | |
| "step": 103936 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8289889102395695e-05, | |
| "loss": 4.0954, | |
| "step": 104448 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8281503154885175e-05, | |
| "loss": 4.12, | |
| "step": 104960 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8273117207374655e-05, | |
| "loss": 4.1087, | |
| "step": 105472 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.826474763866787e-05, | |
| "loss": 4.106, | |
| "step": 105984 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.825636169115735e-05, | |
| "loss": 4.1145, | |
| "step": 106496 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.824797574364683e-05, | |
| "loss": 4.1009, | |
| "step": 107008 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.823958979613631e-05, | |
| "loss": 4.0953, | |
| "step": 107520 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.823122022742952e-05, | |
| "loss": 4.1112, | |
| "step": 108032 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8222834279919e-05, | |
| "loss": 4.0843, | |
| "step": 108544 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.821444833240848e-05, | |
| "loss": 4.0861, | |
| "step": 109056 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.820606238489796e-05, | |
| "loss": 4.1004, | |
| "step": 109568 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.819767643738744e-05, | |
| "loss": 4.1008, | |
| "step": 110080 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.818930686868065e-05, | |
| "loss": 4.0937, | |
| "step": 110592 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.818092092117013e-05, | |
| "loss": 4.0827, | |
| "step": 111104 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.817253497365961e-05, | |
| "loss": 4.084, | |
| "step": 111616 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.816414902614909e-05, | |
| "loss": 4.0893, | |
| "step": 112128 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8155779457442305e-05, | |
| "loss": 4.0975, | |
| "step": 112640 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8147393509931785e-05, | |
| "loss": 4.0889, | |
| "step": 113152 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8139007562421265e-05, | |
| "loss": 4.0939, | |
| "step": 113664 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8130621614910745e-05, | |
| "loss": 4.0931, | |
| "step": 114176 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8122252046203954e-05, | |
| "loss": 4.0939, | |
| "step": 114688 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8113866098693434e-05, | |
| "loss": 4.075, | |
| "step": 115200 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8105480151182914e-05, | |
| "loss": 4.0784, | |
| "step": 115712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8097094203672393e-05, | |
| "loss": 4.0866, | |
| "step": 116224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8088708256161873e-05, | |
| "loss": 4.076, | |
| "step": 116736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.808033868745508e-05, | |
| "loss": 4.0787, | |
| "step": 117248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.807195273994456e-05, | |
| "loss": 4.0716, | |
| "step": 117760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.806356679243404e-05, | |
| "loss": 4.0936, | |
| "step": 118272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.805518084492352e-05, | |
| "loss": 4.0728, | |
| "step": 118784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.804681127621674e-05, | |
| "loss": 4.0612, | |
| "step": 119296 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.803842532870622e-05, | |
| "loss": 4.0729, | |
| "step": 119808 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.80300393811957e-05, | |
| "loss": 4.0741, | |
| "step": 120320 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.802165343368518e-05, | |
| "loss": 4.0808, | |
| "step": 120832 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.801326748617466e-05, | |
| "loss": 4.0644, | |
| "step": 121344 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.800489791746787e-05, | |
| "loss": 4.0654, | |
| "step": 121856 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.799651196995735e-05, | |
| "loss": 4.0744, | |
| "step": 122368 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.798812602244683e-05, | |
| "loss": 4.0628, | |
| "step": 122880 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.79797400749363e-05, | |
| "loss": 4.0584, | |
| "step": 123392 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7971370506229516e-05, | |
| "loss": 4.0626, | |
| "step": 123904 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7962984558718996e-05, | |
| "loss": 4.0703, | |
| "step": 124416 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7954598611208476e-05, | |
| "loss": 4.0701, | |
| "step": 124928 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7946212663697956e-05, | |
| "loss": 4.0651, | |
| "step": 125440 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.793784309499117e-05, | |
| "loss": 4.0488, | |
| "step": 125952 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.792945714748065e-05, | |
| "loss": 4.0553, | |
| "step": 126464 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.792107119997013e-05, | |
| "loss": 4.0571, | |
| "step": 126976 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7912685252459605e-05, | |
| "loss": 4.0613, | |
| "step": 127488 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.790431568375282e-05, | |
| "loss": 4.0571, | |
| "step": 128000 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.78959297362423e-05, | |
| "loss": 4.0518, | |
| "step": 128512 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7887543788731774e-05, | |
| "loss": 4.0439, | |
| "step": 129024 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7879157841221254e-05, | |
| "loss": 4.0589, | |
| "step": 129536 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.787078827251447e-05, | |
| "loss": 4.026, | |
| "step": 130048 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.786240232500395e-05, | |
| "loss": 4.0519, | |
| "step": 130560 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7854032756297166e-05, | |
| "loss": 4.0421, | |
| "step": 131072 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7845646808786646e-05, | |
| "loss": 4.0537, | |
| "step": 131584 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7837260861276126e-05, | |
| "loss": 4.0394, | |
| "step": 132096 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7828874913765606e-05, | |
| "loss": 4.0507, | |
| "step": 132608 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.782048896625508e-05, | |
| "loss": 4.0389, | |
| "step": 133120 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.781210301874456e-05, | |
| "loss": 4.0395, | |
| "step": 133632 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7803733450037775e-05, | |
| "loss": 4.0488, | |
| "step": 134144 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.779534750252725e-05, | |
| "loss": 4.036, | |
| "step": 134656 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.778696155501673e-05, | |
| "loss": 4.0485, | |
| "step": 135168 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.777857560750621e-05, | |
| "loss": 4.0522, | |
| "step": 135680 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7770206038799423e-05, | |
| "loss": 4.0434, | |
| "step": 136192 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7761820091288903e-05, | |
| "loss": 4.0349, | |
| "step": 136704 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7753434143778383e-05, | |
| "loss": 4.0379, | |
| "step": 137216 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.774504819626786e-05, | |
| "loss": 4.0406, | |
| "step": 137728 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.773666224875734e-05, | |
| "loss": 4.0287, | |
| "step": 138240 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.772827630124682e-05, | |
| "loss": 4.0446, | |
| "step": 138752 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.77198903537363e-05, | |
| "loss": 4.0282, | |
| "step": 139264 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.771150440622578e-05, | |
| "loss": 4.0365, | |
| "step": 139776 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.770313483751899e-05, | |
| "loss": 4.0311, | |
| "step": 140288 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.769474889000847e-05, | |
| "loss": 4.027, | |
| "step": 140800 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.768636294249795e-05, | |
| "loss": 4.0172, | |
| "step": 141312 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.767797699498743e-05, | |
| "loss": 4.0242, | |
| "step": 141824 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.766960742628064e-05, | |
| "loss": 4.016, | |
| "step": 142336 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.766122147877012e-05, | |
| "loss": 4.037, | |
| "step": 142848 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.76528355312596e-05, | |
| "loss": 4.0458, | |
| "step": 143360 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.764444958374909e-05, | |
| "loss": 4.0317, | |
| "step": 143872 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.76360800150423e-05, | |
| "loss": 4.0103, | |
| "step": 144384 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.762769406753178e-05, | |
| "loss": 4.0325, | |
| "step": 144896 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.761930812002126e-05, | |
| "loss": 4.0236, | |
| "step": 145408 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.761092217251074e-05, | |
| "loss": 4.0272, | |
| "step": 145920 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7602552603803946e-05, | |
| "loss": 4.0224, | |
| "step": 146432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7594166656293426e-05, | |
| "loss": 4.0245, | |
| "step": 146944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7585780708782906e-05, | |
| "loss": 4.0196, | |
| "step": 147456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7577394761272386e-05, | |
| "loss": 4.0208, | |
| "step": 147968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7569025192565595e-05, | |
| "loss": 4.0191, | |
| "step": 148480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7560639245055075e-05, | |
| "loss": 4.0241, | |
| "step": 148992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7552253297544555e-05, | |
| "loss": 4.0166, | |
| "step": 149504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.754388372883777e-05, | |
| "loss": 4.0099, | |
| "step": 150016 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.753549778132725e-05, | |
| "loss": 4.0266, | |
| "step": 150528 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.752711183381673e-05, | |
| "loss": 4.0186, | |
| "step": 151040 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.751872588630621e-05, | |
| "loss": 4.0112, | |
| "step": 151552 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.751033993879569e-05, | |
| "loss": 4.0261, | |
| "step": 152064 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.750195399128517e-05, | |
| "loss": 4.0078, | |
| "step": 152576 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 4.040642738342285, | |
| "eval_runtime": 301.9306, | |
| "eval_samples_per_second": 1263.837, | |
| "eval_steps_per_second": 39.496, | |
| "step": 152640 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.749356804377465e-05, | |
| "loss": 3.9944, | |
| "step": 153088 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.748518209626413e-05, | |
| "loss": 4.0006, | |
| "step": 153600 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.747679614875361e-05, | |
| "loss": 4.0196, | |
| "step": 154112 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7468410201243084e-05, | |
| "loss": 4.012, | |
| "step": 154624 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7460024253732563e-05, | |
| "loss": 4.0153, | |
| "step": 155136 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7451638306222043e-05, | |
| "loss": 3.9956, | |
| "step": 155648 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.744325235871152e-05, | |
| "loss": 4.0103, | |
| "step": 156160 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7434866411201e-05, | |
| "loss": 3.9869, | |
| "step": 156672 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.742648046369049e-05, | |
| "loss": 4.0128, | |
| "step": 157184 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.741809451617997e-05, | |
| "loss": 3.9934, | |
| "step": 157696 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.740970856866945e-05, | |
| "loss": 4.01, | |
| "step": 158208 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.740133899996266e-05, | |
| "loss": 4.004, | |
| "step": 158720 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.739296943125587e-05, | |
| "loss": 3.9969, | |
| "step": 159232 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.738458348374535e-05, | |
| "loss": 3.9886, | |
| "step": 159744 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.737619753623483e-05, | |
| "loss": 3.9864, | |
| "step": 160256 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.736781158872431e-05, | |
| "loss": 3.9868, | |
| "step": 160768 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.735944202001752e-05, | |
| "loss": 3.9928, | |
| "step": 161280 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7351056072507e-05, | |
| "loss": 3.9946, | |
| "step": 161792 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.734267012499648e-05, | |
| "loss": 3.9913, | |
| "step": 162304 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.733428417748596e-05, | |
| "loss": 4.0051, | |
| "step": 162816 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7325898229975444e-05, | |
| "loss": 4.0017, | |
| "step": 163328 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7317512282464924e-05, | |
| "loss": 3.9974, | |
| "step": 163840 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7309126334954404e-05, | |
| "loss": 3.9895, | |
| "step": 164352 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7300740387443884e-05, | |
| "loss": 4.001, | |
| "step": 164864 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.729237081873709e-05, | |
| "loss": 3.9816, | |
| "step": 165376 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.728398487122657e-05, | |
| "loss": 3.993, | |
| "step": 165888 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.727559892371605e-05, | |
| "loss": 3.9909, | |
| "step": 166400 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.726721297620553e-05, | |
| "loss": 3.9876, | |
| "step": 166912 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.725884340749874e-05, | |
| "loss": 3.9755, | |
| "step": 167424 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.725045745998822e-05, | |
| "loss": 3.9784, | |
| "step": 167936 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.72420715124777e-05, | |
| "loss": 3.9816, | |
| "step": 168448 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.723370194377091e-05, | |
| "loss": 3.9881, | |
| "step": 168960 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.72253159962604e-05, | |
| "loss": 3.9895, | |
| "step": 169472 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.721693004874988e-05, | |
| "loss": 3.9817, | |
| "step": 169984 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.720854410123936e-05, | |
| "loss": 3.9821, | |
| "step": 170496 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.720015815372884e-05, | |
| "loss": 3.9811, | |
| "step": 171008 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.719177220621832e-05, | |
| "loss": 3.9668, | |
| "step": 171520 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.71833862587078e-05, | |
| "loss": 3.977, | |
| "step": 172032 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7175016690001006e-05, | |
| "loss": 3.9696, | |
| "step": 172544 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7166630742490486e-05, | |
| "loss": 3.9687, | |
| "step": 173056 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7158244794979966e-05, | |
| "loss": 3.9724, | |
| "step": 173568 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7149858847469446e-05, | |
| "loss": 3.9813, | |
| "step": 174080 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7141489278762655e-05, | |
| "loss": 3.9687, | |
| "step": 174592 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7133103331252135e-05, | |
| "loss": 3.9645, | |
| "step": 175104 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7124717383741615e-05, | |
| "loss": 3.9778, | |
| "step": 175616 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7116331436231095e-05, | |
| "loss": 3.97, | |
| "step": 176128 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.710796186752431e-05, | |
| "loss": 3.9781, | |
| "step": 176640 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.709957592001379e-05, | |
| "loss": 3.9639, | |
| "step": 177152 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.709118997250327e-05, | |
| "loss": 3.9609, | |
| "step": 177664 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7082804024992744e-05, | |
| "loss": 3.9711, | |
| "step": 178176 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.707443445628596e-05, | |
| "loss": 3.9637, | |
| "step": 178688 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.706604850877544e-05, | |
| "loss": 3.9671, | |
| "step": 179200 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.705767894006865e-05, | |
| "loss": 3.9577, | |
| "step": 179712 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.704929299255813e-05, | |
| "loss": 3.9637, | |
| "step": 180224 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.704090704504761e-05, | |
| "loss": 3.9505, | |
| "step": 180736 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.703252109753709e-05, | |
| "loss": 3.9655, | |
| "step": 181248 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.702413515002657e-05, | |
| "loss": 3.96, | |
| "step": 181760 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.701574920251605e-05, | |
| "loss": 3.9593, | |
| "step": 182272 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.700736325500553e-05, | |
| "loss": 3.9683, | |
| "step": 182784 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.699897730749501e-05, | |
| "loss": 3.9509, | |
| "step": 183296 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.699060773878822e-05, | |
| "loss": 3.9473, | |
| "step": 183808 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.69822217912777e-05, | |
| "loss": 3.9686, | |
| "step": 184320 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.697383584376718e-05, | |
| "loss": 3.9398, | |
| "step": 184832 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.696544989625666e-05, | |
| "loss": 3.939, | |
| "step": 185344 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.695708032754987e-05, | |
| "loss": 3.9541, | |
| "step": 185856 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.694869438003935e-05, | |
| "loss": 3.9597, | |
| "step": 186368 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.694032481133256e-05, | |
| "loss": 3.9513, | |
| "step": 186880 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.693193886382204e-05, | |
| "loss": 3.9421, | |
| "step": 187392 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.692355291631152e-05, | |
| "loss": 3.9432, | |
| "step": 187904 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6915166968801e-05, | |
| "loss": 3.944, | |
| "step": 188416 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.690679740009422e-05, | |
| "loss": 3.9584, | |
| "step": 188928 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.689841145258369e-05, | |
| "loss": 3.9506, | |
| "step": 189440 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.689002550507317e-05, | |
| "loss": 3.9533, | |
| "step": 189952 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.688163955756265e-05, | |
| "loss": 3.9523, | |
| "step": 190464 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.687325361005213e-05, | |
| "loss": 3.957, | |
| "step": 190976 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.686486766254161e-05, | |
| "loss": 3.9378, | |
| "step": 191488 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.685648171503109e-05, | |
| "loss": 3.9414, | |
| "step": 192000 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.684809576752057e-05, | |
| "loss": 3.9504, | |
| "step": 192512 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.683974257761751e-05, | |
| "loss": 3.9362, | |
| "step": 193024 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6831356630106996e-05, | |
| "loss": 3.9446, | |
| "step": 193536 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6822970682596476e-05, | |
| "loss": 3.9367, | |
| "step": 194048 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6814584735085956e-05, | |
| "loss": 3.9553, | |
| "step": 194560 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6806198787575436e-05, | |
| "loss": 3.9401, | |
| "step": 195072 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6797812840064916e-05, | |
| "loss": 3.9304, | |
| "step": 195584 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6789426892554396e-05, | |
| "loss": 3.9352, | |
| "step": 196096 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6781040945043876e-05, | |
| "loss": 3.9419, | |
| "step": 196608 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6772671376337085e-05, | |
| "loss": 3.9525, | |
| "step": 197120 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6764285428826565e-05, | |
| "loss": 3.9335, | |
| "step": 197632 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6755899481316045e-05, | |
| "loss": 3.9372, | |
| "step": 198144 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6747513533805525e-05, | |
| "loss": 3.9351, | |
| "step": 198656 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6739127586295005e-05, | |
| "loss": 3.9426, | |
| "step": 199168 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6730758017588214e-05, | |
| "loss": 3.9304, | |
| "step": 199680 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6722372070077694e-05, | |
| "loss": 3.9296, | |
| "step": 200192 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6713986122567174e-05, | |
| "loss": 3.9414, | |
| "step": 200704 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.670560017505666e-05, | |
| "loss": 3.9424, | |
| "step": 201216 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.669723060634987e-05, | |
| "loss": 3.9398, | |
| "step": 201728 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.668884465883935e-05, | |
| "loss": 3.9225, | |
| "step": 202240 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.668045871132883e-05, | |
| "loss": 3.9294, | |
| "step": 202752 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.667207276381831e-05, | |
| "loss": 3.9302, | |
| "step": 203264 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.666368681630779e-05, | |
| "loss": 3.9343, | |
| "step": 203776 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.665530086879727e-05, | |
| "loss": 3.9345, | |
| "step": 204288 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.664693130009048e-05, | |
| "loss": 3.9256, | |
| "step": 204800 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.663854535257996e-05, | |
| "loss": 3.9216, | |
| "step": 205312 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.663015940506944e-05, | |
| "loss": 3.9341, | |
| "step": 205824 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.662177345755892e-05, | |
| "loss": 3.9076, | |
| "step": 206336 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.66133875100484e-05, | |
| "loss": 3.9262, | |
| "step": 206848 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6605017941341614e-05, | |
| "loss": 3.9179, | |
| "step": 207360 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6596631993831094e-05, | |
| "loss": 3.9324, | |
| "step": 207872 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6588246046320574e-05, | |
| "loss": 3.9218, | |
| "step": 208384 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6579860098810054e-05, | |
| "loss": 3.9266, | |
| "step": 208896 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.657147415129953e-05, | |
| "loss": 3.9184, | |
| "step": 209408 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.656308820378901e-05, | |
| "loss": 3.9211, | |
| "step": 209920 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.655471863508222e-05, | |
| "loss": 3.9287, | |
| "step": 210432 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.65463326875717e-05, | |
| "loss": 3.9201, | |
| "step": 210944 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6537946740061176e-05, | |
| "loss": 3.9297, | |
| "step": 211456 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6529560792550656e-05, | |
| "loss": 3.9314, | |
| "step": 211968 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.65212076026476e-05, | |
| "loss": 3.9286, | |
| "step": 212480 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.651282165513708e-05, | |
| "loss": 3.9192, | |
| "step": 212992 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.650443570762657e-05, | |
| "loss": 3.9149, | |
| "step": 213504 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.649604976011605e-05, | |
| "loss": 3.9276, | |
| "step": 214016 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.648766381260553e-05, | |
| "loss": 3.9112, | |
| "step": 214528 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6479277865095e-05, | |
| "loss": 3.9256, | |
| "step": 215040 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.647089191758448e-05, | |
| "loss": 3.9139, | |
| "step": 215552 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.646250597007396e-05, | |
| "loss": 3.921, | |
| "step": 216064 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.645413640136718e-05, | |
| "loss": 3.9166, | |
| "step": 216576 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.644575045385665e-05, | |
| "loss": 3.9148, | |
| "step": 217088 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.643736450634613e-05, | |
| "loss": 3.9028, | |
| "step": 217600 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.642897855883561e-05, | |
| "loss": 3.9131, | |
| "step": 218112 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.642059261132509e-05, | |
| "loss": 3.9072, | |
| "step": 218624 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6412223042618306e-05, | |
| "loss": 3.9176, | |
| "step": 219136 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6403837095107786e-05, | |
| "loss": 3.9305, | |
| "step": 219648 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6395451147597266e-05, | |
| "loss": 3.9247, | |
| "step": 220160 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6387065200086746e-05, | |
| "loss": 3.8943, | |
| "step": 220672 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6378679252576226e-05, | |
| "loss": 3.9243, | |
| "step": 221184 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6370309683869435e-05, | |
| "loss": 3.9136, | |
| "step": 221696 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6361923736358915e-05, | |
| "loss": 3.9142, | |
| "step": 222208 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6353537788848395e-05, | |
| "loss": 3.9034, | |
| "step": 222720 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6345151841337874e-05, | |
| "loss": 3.9161, | |
| "step": 223232 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6336765893827354e-05, | |
| "loss": 3.9085, | |
| "step": 223744 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6328379946316834e-05, | |
| "loss": 3.9107, | |
| "step": 224256 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6319993998806314e-05, | |
| "loss": 3.912, | |
| "step": 224768 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6311624430099523e-05, | |
| "loss": 3.9148, | |
| "step": 225280 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6303238482589003e-05, | |
| "loss": 3.9099, | |
| "step": 225792 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.629486891388222e-05, | |
| "loss": 3.9053, | |
| "step": 226304 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.62864829663717e-05, | |
| "loss": 3.9144, | |
| "step": 226816 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.627809701886118e-05, | |
| "loss": 3.9132, | |
| "step": 227328 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.626971107135066e-05, | |
| "loss": 3.9037, | |
| "step": 227840 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.626132512384014e-05, | |
| "loss": 3.9176, | |
| "step": 228352 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.625293917632962e-05, | |
| "loss": 3.9019, | |
| "step": 228864 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 3.9672722816467285, | |
| "eval_runtime": 325.6205, | |
| "eval_samples_per_second": 1171.889, | |
| "eval_steps_per_second": 36.622, | |
| "step": 228960 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.62445532288191e-05, | |
| "loss": 3.894, | |
| "step": 229376 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.623616728130858e-05, | |
| "loss": 3.8961, | |
| "step": 229888 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.622778133379806e-05, | |
| "loss": 3.9129, | |
| "step": 230400 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.621939538628753e-05, | |
| "loss": 3.9046, | |
| "step": 230912 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.621100943877701e-05, | |
| "loss": 3.9166, | |
| "step": 231424 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.620263987007023e-05, | |
| "loss": 3.8888, | |
| "step": 231936 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.619425392255971e-05, | |
| "loss": 3.9068, | |
| "step": 232448 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.618586797504919e-05, | |
| "loss": 3.8817, | |
| "step": 232960 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.617748202753867e-05, | |
| "loss": 3.9123, | |
| "step": 233472 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6169112458831884e-05, | |
| "loss": 3.8929, | |
| "step": 233984 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6160726511321364e-05, | |
| "loss": 3.9041, | |
| "step": 234496 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.615234056381084e-05, | |
| "loss": 3.9026, | |
| "step": 235008 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.614397099510405e-05, | |
| "loss": 3.8895, | |
| "step": 235520 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.613558504759353e-05, | |
| "loss": 3.8939, | |
| "step": 236032 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6127199100083006e-05, | |
| "loss": 3.8864, | |
| "step": 236544 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6118813152572486e-05, | |
| "loss": 3.8845, | |
| "step": 237056 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6110427205061966e-05, | |
| "loss": 3.8955, | |
| "step": 237568 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6102041257551446e-05, | |
| "loss": 3.8922, | |
| "step": 238080 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6093655310040926e-05, | |
| "loss": 3.8918, | |
| "step": 238592 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6085269362530406e-05, | |
| "loss": 3.9051, | |
| "step": 239104 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.607689979382362e-05, | |
| "loss": 3.9054, | |
| "step": 239616 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.60685138463131e-05, | |
| "loss": 3.8967, | |
| "step": 240128 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.606012789880258e-05, | |
| "loss": 3.8917, | |
| "step": 240640 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.605174195129206e-05, | |
| "loss": 3.9009, | |
| "step": 241152 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.604337238258527e-05, | |
| "loss": 3.8846, | |
| "step": 241664 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.603498643507475e-05, | |
| "loss": 3.8982, | |
| "step": 242176 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.602660048756423e-05, | |
| "loss": 3.8931, | |
| "step": 242688 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.601821454005371e-05, | |
| "loss": 3.8865, | |
| "step": 243200 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.600984497134692e-05, | |
| "loss": 3.8795, | |
| "step": 243712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.60014590238364e-05, | |
| "loss": 3.8827, | |
| "step": 244224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.599307307632588e-05, | |
| "loss": 3.8853, | |
| "step": 244736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.598468712881536e-05, | |
| "loss": 3.893, | |
| "step": 245248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5976317560108575e-05, | |
| "loss": 3.8944, | |
| "step": 245760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5967931612598055e-05, | |
| "loss": 3.887, | |
| "step": 246272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5959545665087535e-05, | |
| "loss": 3.8871, | |
| "step": 246784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5951159717577015e-05, | |
| "loss": 3.8875, | |
| "step": 247296 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5942773770066495e-05, | |
| "loss": 3.8725, | |
| "step": 247808 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5934404201359704e-05, | |
| "loss": 3.8812, | |
| "step": 248320 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5926018253849184e-05, | |
| "loss": 3.8721, | |
| "step": 248832 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5917632306338664e-05, | |
| "loss": 3.8776, | |
| "step": 249344 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5909246358828144e-05, | |
| "loss": 3.8785, | |
| "step": 249856 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.590087679012135e-05, | |
| "loss": 3.8837, | |
| "step": 250368 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.589249084261083e-05, | |
| "loss": 3.8836, | |
| "step": 250880 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.588410489510031e-05, | |
| "loss": 3.8703, | |
| "step": 251392 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.58757189475898e-05, | |
| "loss": 3.8834, | |
| "step": 251904 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.586733300007928e-05, | |
| "loss": 3.884, | |
| "step": 252416 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.585896343137249e-05, | |
| "loss": 3.884, | |
| "step": 252928 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.585057748386197e-05, | |
| "loss": 3.8757, | |
| "step": 253440 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.584219153635145e-05, | |
| "loss": 3.8671, | |
| "step": 253952 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.583380558884093e-05, | |
| "loss": 3.8814, | |
| "step": 254464 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.582543602013414e-05, | |
| "loss": 3.8715, | |
| "step": 254976 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.581705007262362e-05, | |
| "loss": 3.877, | |
| "step": 255488 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.58086641251131e-05, | |
| "loss": 3.8691, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.580027817760258e-05, | |
| "loss": 3.8701, | |
| "step": 256512 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.579189223009206e-05, | |
| "loss": 3.8651, | |
| "step": 257024 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5783522661385267e-05, | |
| "loss": 3.8755, | |
| "step": 257536 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.577513671387475e-05, | |
| "loss": 3.8724, | |
| "step": 258048 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.576675076636423e-05, | |
| "loss": 3.873, | |
| "step": 258560 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.575836481885371e-05, | |
| "loss": 3.8763, | |
| "step": 259072 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.574999525014692e-05, | |
| "loss": 3.8612, | |
| "step": 259584 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.57416093026364e-05, | |
| "loss": 3.8626, | |
| "step": 260096 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.573322335512588e-05, | |
| "loss": 3.8834, | |
| "step": 260608 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.572483740761536e-05, | |
| "loss": 3.8524, | |
| "step": 261120 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.571646783890857e-05, | |
| "loss": 3.8507, | |
| "step": 261632 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.570808189139805e-05, | |
| "loss": 3.8635, | |
| "step": 262144 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.569969594388753e-05, | |
| "loss": 3.8745, | |
| "step": 262656 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.569130999637701e-05, | |
| "loss": 3.8643, | |
| "step": 263168 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.568294042767022e-05, | |
| "loss": 3.8594, | |
| "step": 263680 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.567455448015971e-05, | |
| "loss": 3.8538, | |
| "step": 264192 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.566616853264919e-05, | |
| "loss": 3.8587, | |
| "step": 264704 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.565778258513867e-05, | |
| "loss": 3.8734, | |
| "step": 265216 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5649413016431876e-05, | |
| "loss": 3.8655, | |
| "step": 265728 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5641027068921356e-05, | |
| "loss": 3.8638, | |
| "step": 266240 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5632641121410836e-05, | |
| "loss": 3.8721, | |
| "step": 266752 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5624255173900316e-05, | |
| "loss": 3.8721, | |
| "step": 267264 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.561586922638979e-05, | |
| "loss": 3.8536, | |
| "step": 267776 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5607499657683005e-05, | |
| "loss": 3.86, | |
| "step": 268288 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5599113710172485e-05, | |
| "loss": 3.8673, | |
| "step": 268800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5590727762661965e-05, | |
| "loss": 3.8492, | |
| "step": 269312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5582341815151445e-05, | |
| "loss": 3.8622, | |
| "step": 269824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5573955867640925e-05, | |
| "loss": 3.8542, | |
| "step": 270336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.556558629893414e-05, | |
| "loss": 3.8734, | |
| "step": 270848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5557200351423614e-05, | |
| "loss": 3.8577, | |
| "step": 271360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5548814403913094e-05, | |
| "loss": 3.8496, | |
| "step": 271872 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5540428456402574e-05, | |
| "loss": 3.848, | |
| "step": 272384 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.553205888769579e-05, | |
| "loss": 3.8589, | |
| "step": 272896 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.552367294018526e-05, | |
| "loss": 3.8698, | |
| "step": 273408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.551528699267474e-05, | |
| "loss": 3.855, | |
| "step": 273920 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.550690104516422e-05, | |
| "loss": 3.852, | |
| "step": 274432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.549853147645744e-05, | |
| "loss": 3.8514, | |
| "step": 274944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.549014552894691e-05, | |
| "loss": 3.8635, | |
| "step": 275456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.548175958143639e-05, | |
| "loss": 3.8487, | |
| "step": 275968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.547337363392588e-05, | |
| "loss": 3.8507, | |
| "step": 276480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.546500406521909e-05, | |
| "loss": 3.8608, | |
| "step": 276992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.545661811770857e-05, | |
| "loss": 3.8642, | |
| "step": 277504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.544823217019805e-05, | |
| "loss": 3.8561, | |
| "step": 278016 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.543984622268753e-05, | |
| "loss": 3.8402, | |
| "step": 278528 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5431476653980736e-05, | |
| "loss": 3.8513, | |
| "step": 279040 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5423090706470216e-05, | |
| "loss": 3.8487, | |
| "step": 279552 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5414704758959696e-05, | |
| "loss": 3.8552, | |
| "step": 280064 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5406318811449176e-05, | |
| "loss": 3.8549, | |
| "step": 280576 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5397949242742385e-05, | |
| "loss": 3.8464, | |
| "step": 281088 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5389563295231865e-05, | |
| "loss": 3.8449, | |
| "step": 281600 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5381177347721345e-05, | |
| "loss": 3.8519, | |
| "step": 282112 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.537279140021083e-05, | |
| "loss": 3.832, | |
| "step": 282624 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.536442183150404e-05, | |
| "loss": 3.8483, | |
| "step": 283136 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.535603588399352e-05, | |
| "loss": 3.8372, | |
| "step": 283648 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5347649936483e-05, | |
| "loss": 3.8548, | |
| "step": 284160 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.533926398897248e-05, | |
| "loss": 3.8441, | |
| "step": 284672 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.533089442026569e-05, | |
| "loss": 3.8458, | |
| "step": 285184 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.532250847275517e-05, | |
| "loss": 3.8409, | |
| "step": 285696 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.531412252524465e-05, | |
| "loss": 3.8474, | |
| "step": 286208 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.530573657773413e-05, | |
| "loss": 3.8524, | |
| "step": 286720 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.529736700902734e-05, | |
| "loss": 3.8391, | |
| "step": 287232 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.528898106151682e-05, | |
| "loss": 3.8509, | |
| "step": 287744 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.52805951140063e-05, | |
| "loss": 3.8542, | |
| "step": 288256 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5272225545299515e-05, | |
| "loss": 3.8489, | |
| "step": 288768 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5263839597788995e-05, | |
| "loss": 3.8486, | |
| "step": 289280 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5255453650278475e-05, | |
| "loss": 3.8358, | |
| "step": 289792 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5247067702767955e-05, | |
| "loss": 3.8526, | |
| "step": 290304 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5238681755257435e-05, | |
| "loss": 3.841, | |
| "step": 290816 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5230295807746915e-05, | |
| "loss": 3.8461, | |
| "step": 291328 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5221909860236395e-05, | |
| "loss": 3.8355, | |
| "step": 291840 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5213523912725875e-05, | |
| "loss": 3.8454, | |
| "step": 292352 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5205154344019084e-05, | |
| "loss": 3.8409, | |
| "step": 292864 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5196768396508564e-05, | |
| "loss": 3.8375, | |
| "step": 293376 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5188382448998044e-05, | |
| "loss": 3.8301, | |
| "step": 293888 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5179996501487524e-05, | |
| "loss": 3.8345, | |
| "step": 294400 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.517162693278074e-05, | |
| "loss": 3.8357, | |
| "step": 294912 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.516324098527022e-05, | |
| "loss": 3.8417, | |
| "step": 295424 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.51548550377597e-05, | |
| "loss": 3.8522, | |
| "step": 295936 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.514646909024918e-05, | |
| "loss": 3.8514, | |
| "step": 296448 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.513809952154239e-05, | |
| "loss": 3.8224, | |
| "step": 296960 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512971357403187e-05, | |
| "loss": 3.8519, | |
| "step": 297472 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512132762652135e-05, | |
| "loss": 3.8389, | |
| "step": 297984 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.511294167901083e-05, | |
| "loss": 3.8404, | |
| "step": 298496 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.510457211030404e-05, | |
| "loss": 3.8313, | |
| "step": 299008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.509618616279352e-05, | |
| "loss": 3.842, | |
| "step": 299520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5087800215283e-05, | |
| "loss": 3.8342, | |
| "step": 300032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.507941426777248e-05, | |
| "loss": 3.8441, | |
| "step": 300544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.507104469906569e-05, | |
| "loss": 3.8361, | |
| "step": 301056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.506265875155517e-05, | |
| "loss": 3.8396, | |
| "step": 301568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.505427280404465e-05, | |
| "loss": 3.8406, | |
| "step": 302080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.504588685653413e-05, | |
| "loss": 3.8312, | |
| "step": 302592 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.503751728782734e-05, | |
| "loss": 3.84, | |
| "step": 303104 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.502913134031682e-05, | |
| "loss": 3.8458, | |
| "step": 303616 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.50207453928063e-05, | |
| "loss": 3.8311, | |
| "step": 304128 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.501235944529578e-05, | |
| "loss": 3.8401, | |
| "step": 304640 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.500397349778526e-05, | |
| "loss": 3.8342, | |
| "step": 305152 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 3.9272706508636475, | |
| "eval_runtime": 302.1874, | |
| "eval_samples_per_second": 1262.763, | |
| "eval_steps_per_second": 39.462, | |
| "step": 305280 | |
| } | |
| ], | |
| "logging_steps": 512, | |
| "max_steps": 3052726, | |
| "num_train_epochs": 9223372036854775807, | |
| "save_steps": 10, | |
| "total_flos": 2.0592098703293645e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
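
The object above is the checkpoint log that the Hugging Face `Trainer` writes out as `trainer_state.json`: the `log_history` array holds one record per logging event (every 512 optimizer steps here, per `logging_steps`), interleaved with periodic `eval_loss` records, and the trailer fields (`max_steps`, `total_flos`, etc.) describe the run as a whole. As a minimal sketch for inspecting such a file (assuming it has been saved locally as `trainer_state.json`; the path and the `summarize` helper are hypothetical, not part of the training code), the state can be parsed with the standard library alone:

```python
import json
from pathlib import Path

# Hypothetical location of the file shown above; adjust the path as needed.
STATE_PATH = Path("trainer_state.json")


def summarize(state_path: Path) -> None:
    """Summarize the loss curve recorded in a Hugging Face Trainer state file."""
    state = json.loads(state_path.read_text())

    # `log_history` is the standard Trainer field holding one dict per log event;
    # training records carry "loss", evaluation records carry "eval_loss".
    train_logs = [e for e in state["log_history"] if "loss" in e]
    eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

    print(f"progress: step {train_logs[-1]['step']} of {state['max_steps']} "
          f"(logged every {state['logging_steps']} steps)")
    print(f"train loss: {train_logs[0]['loss']:.4f} -> {train_logs[-1]['loss']:.4f}")

    for e in eval_logs:
        print(f"eval_loss at step {e['step']}: {e['eval_loss']:.4f} "
              f"({e['eval_samples_per_second']:.0f} samples/s)")

    if eval_logs:
        best = min(eval_logs, key=lambda e: e["eval_loss"])
        print(f"best eval_loss so far: {best['eval_loss']:.4f} at step {best['step']}")


if __name__ == "__main__":
    summarize(STATE_PATH)
```

Run against this excerpt, the summary would report the two evaluation records visible above (steps 228960 and 305280), with the later one (eval_loss ≈ 3.9273) as the lowest seen so far. Sticking to `json` and `pathlib` keeps the check lightweight enough to run on a login node without importing the training stack.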