{
  "best_metric": 3.9110300540924072,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/det-noun/transformer/0/checkpoints/checkpoint-305280",
  "epoch": 0.025000606015738065,
  "eval_steps": 10,
  "global_step": 305280,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.999998362119627e-05,
      "loss": 11.0151,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.999161405248948e-05,
      "loss": 6.8282,
      "step": 512
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.998322810497896e-05,
      "loss": 6.1915,
      "step": 1024
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.997484215746844e-05,
      "loss": 5.9799,
      "step": 1536
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.996645620995792e-05,
      "loss": 5.8143,
      "step": 2048
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99580702624474e-05,
      "loss": 5.7046,
      "step": 2560
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994968431493688e-05,
      "loss": 5.6062,
      "step": 3072
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994129836742636e-05,
      "loss": 5.5397,
      "step": 3584
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.993291241991584e-05,
      "loss": 5.4571,
      "step": 4096
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.992452647240532e-05,
      "loss": 5.4013,
      "step": 4608
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99161405248948e-05,
      "loss": 5.3502,
      "step": 5120
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.990775457738428e-05,
      "loss": 5.3109,
      "step": 5632
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989936862987376e-05,
      "loss": 5.2632,
      "step": 6144
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989098268236324e-05,
      "loss": 5.2113,
      "step": 6656
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.988259673485272e-05,
      "loss": 5.1805,
      "step": 7168
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.98742107873422e-05,
      "loss": 5.1309,
      "step": 7680
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.986584121863541e-05,
      "loss": 5.1241,
      "step": 8192
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.985745527112489e-05,
      "loss": 5.0792,
      "step": 8704
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984906932361437e-05,
      "loss": 5.0461,
      "step": 9216
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984068337610385e-05,
      "loss": 5.0263,
      "step": 9728
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.983231380739706e-05,
      "loss": 5.0135,
      "step": 10240
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9823927859886547e-05,
      "loss": 4.9822,
      "step": 10752
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9815541912376026e-05,
      "loss": 4.9566,
      "step": 11264
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9807155964865506e-05,
      "loss": 4.9371,
      "step": 11776
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9798770017354986e-05,
      "loss": 4.9218,
      "step": 12288
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9790400448648195e-05,
      "loss": 4.8922,
      "step": 12800
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9782014501137675e-05,
      "loss": 4.8754,
      "step": 13312
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9773628553627155e-05,
      "loss": 4.8546,
      "step": 13824
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9765242606116635e-05,
      "loss": 4.8375,
      "step": 14336
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9756856658606115e-05,
      "loss": 4.8217,
      "step": 14848
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9748487089899324e-05,
      "loss": 4.8077,
      "step": 15360
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9740101142388804e-05,
      "loss": 4.7973,
      "step": 15872
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9731715194878284e-05,
      "loss": 4.7901,
      "step": 16384
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.97233456261715e-05,
      "loss": 4.7719,
      "step": 16896
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.971495967866098e-05,
      "loss": 4.7577,
      "step": 17408
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.970657373115046e-05,
      "loss": 4.7452,
      "step": 17920
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.969818778363994e-05,
      "loss": 4.7374,
      "step": 18432
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.968981821493315e-05,
      "loss": 4.7117,
      "step": 18944
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.968143226742263e-05,
      "loss": 4.711,
      "step": 19456
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.967304631991211e-05,
      "loss": 4.6792,
      "step": 19968
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.966466037240159e-05,
      "loss": 4.6816,
      "step": 20480
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.965627442489107e-05,
      "loss": 4.6747,
      "step": 20992
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.964788847738054e-05,
      "loss": 4.6752,
      "step": 21504
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.963950252987002e-05,
      "loss": 4.6439,
      "step": 22016
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96311165823595e-05,
      "loss": 4.6497,
      "step": 22528
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.962274701365272e-05,
      "loss": 4.6378,
      "step": 23040
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96143610661422e-05,
      "loss": 4.627,
      "step": 23552
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.960597511863168e-05,
      "loss": 4.6328,
      "step": 24064
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.959758917112116e-05,
      "loss": 4.5953,
      "step": 24576
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.95892359812181e-05,
      "loss": 4.5983,
      "step": 25088
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.958085003370758e-05,
      "loss": 4.6034,
      "step": 25600
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.957246408619706e-05,
      "loss": 4.5958,
      "step": 26112
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.956407813868654e-05,
      "loss": 4.5771,
      "step": 26624
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9555692191176016e-05,
      "loss": 4.5579,
      "step": 27136
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9547306243665496e-05,
      "loss": 4.5662,
      "step": 27648
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9538920296154976e-05,
      "loss": 4.5471,
      "step": 28160
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9530534348644456e-05,
      "loss": 4.5715,
      "step": 28672
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.952216477993767e-05,
      "loss": 4.5225,
      "step": 29184
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.951377883242715e-05,
      "loss": 4.5414,
      "step": 29696
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.950539288491663e-05,
      "loss": 4.5312,
      "step": 30208
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.949700693740611e-05,
      "loss": 4.5223,
      "step": 30720
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948863736869932e-05,
      "loss": 4.5258,
      "step": 31232
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.94802514211888e-05,
      "loss": 4.506,
      "step": 31744
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.947186547367828e-05,
      "loss": 4.4908,
      "step": 32256
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.946347952616776e-05,
      "loss": 4.4933,
      "step": 32768
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.945510995746097e-05,
      "loss": 4.5072,
      "step": 33280
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.944672400995045e-05,
      "loss": 4.4777,
      "step": 33792
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.943833806243993e-05,
      "loss": 4.4792,
      "step": 34304
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942995211492941e-05,
      "loss": 4.4632,
      "step": 34816
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9421582546222625e-05,
      "loss": 4.4632,
      "step": 35328
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9413196598712105e-05,
      "loss": 4.479,
      "step": 35840
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9404810651201585e-05,
      "loss": 4.4765,
      "step": 36352
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9396424703691065e-05,
      "loss": 4.4503,
      "step": 36864
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9388055134984274e-05,
      "loss": 4.4618,
      "step": 37376
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9379669187473754e-05,
      "loss": 4.4683,
      "step": 37888
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9371283239963234e-05,
      "loss": 4.4532,
      "step": 38400
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.936291367125644e-05,
      "loss": 4.4324,
      "step": 38912
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.935452772374592e-05,
      "loss": 4.4377,
      "step": 39424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.93461417762354e-05,
      "loss": 4.4333,
      "step": 39936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.933775582872488e-05,
      "loss": 4.4256,
      "step": 40448
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.932936988121436e-05,
      "loss": 4.4223,
      "step": 40960
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.932098393370384e-05,
      "loss": 4.4212,
      "step": 41472
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.931259798619332e-05,
      "loss": 4.4233,
      "step": 41984
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.930421203868281e-05,
      "loss": 4.4156,
      "step": 42496
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.929584246997602e-05,
      "loss": 4.3953,
      "step": 43008
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.92874565224655e-05,
      "loss": 4.3987,
      "step": 43520
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.927907057495498e-05,
      "loss": 4.4002,
      "step": 44032
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.927068462744446e-05,
      "loss": 4.3963,
      "step": 44544
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.926231505873767e-05,
      "loss": 4.394,
      "step": 45056
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.925394549003088e-05,
      "loss": 4.3829,
      "step": 45568
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.924555954252036e-05,
      "loss": 4.3803,
      "step": 46080
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.923717359500984e-05,
      "loss": 4.3712,
      "step": 46592
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9228804026303046e-05,
      "loss": 4.3827,
      "step": 47104
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.922041807879253e-05,
      "loss": 4.3731,
      "step": 47616
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.921203213128201e-05,
      "loss": 4.3736,
      "step": 48128
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.920364618377149e-05,
      "loss": 4.3721,
      "step": 48640
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.919526023626097e-05,
      "loss": 4.3563,
      "step": 49152
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.918687428875045e-05,
      "loss": 4.3602,
      "step": 49664
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.917848834123993e-05,
      "loss": 4.3535,
      "step": 50176
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.917010239372941e-05,
      "loss": 4.3519,
      "step": 50688
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.916171644621889e-05,
      "loss": 4.3562,
      "step": 51200
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.915333049870837e-05,
      "loss": 4.3373,
      "step": 51712
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.914496093000158e-05,
      "loss": 4.3435,
      "step": 52224
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.913657498249106e-05,
      "loss": 4.3424,
      "step": 52736
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.912818903498054e-05,
      "loss": 4.3356,
      "step": 53248
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.911980308747002e-05,
      "loss": 4.326,
      "step": 53760
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.911143351876323e-05,
      "loss": 4.3262,
      "step": 54272
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.910304757125272e-05,
      "loss": 4.3292,
      "step": 54784
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.90946616237422e-05,
      "loss": 4.3213,
      "step": 55296
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.908627567623168e-05,
      "loss": 4.3209,
      "step": 55808
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.907788972872115e-05,
      "loss": 4.3137,
      "step": 56320
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.906950378121063e-05,
      "loss": 4.306,
      "step": 56832
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.906111783370011e-05,
      "loss": 4.3248,
      "step": 57344
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9052748264993326e-05,
      "loss": 4.3046,
      "step": 57856
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.90443623174828e-05,
      "loss": 4.3103,
      "step": 58368
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.903597636997228e-05,
      "loss": 4.3086,
      "step": 58880
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.902759042246176e-05,
      "loss": 4.3084,
      "step": 59392
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9019220853754975e-05,
      "loss": 4.3002,
      "step": 59904
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9010834906244455e-05,
      "loss": 4.2947,
      "step": 60416
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9002448958733935e-05,
      "loss": 4.3083,
      "step": 60928
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8994063011223415e-05,
      "loss": 4.2965,
      "step": 61440
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8985677063712895e-05,
      "loss": 4.284,
      "step": 61952
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8977291116202375e-05,
      "loss": 4.2857,
      "step": 62464
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8968905168691855e-05,
      "loss": 4.2817,
      "step": 62976
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8960535599985064e-05,
      "loss": 4.281,
      "step": 63488
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8952149652474544e-05,
      "loss": 4.2745,
      "step": 64000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8943763704964024e-05,
      "loss": 4.279,
      "step": 64512
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8935377757453504e-05,
      "loss": 4.2668,
      "step": 65024
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.892700818874671e-05,
      "loss": 4.2686,
      "step": 65536
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.891862224123619e-05,
      "loss": 4.2638,
      "step": 66048
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.891023629372567e-05,
      "loss": 4.2822,
      "step": 66560
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.890185034621515e-05,
      "loss": 4.2745,
      "step": 67072
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.889346439870464e-05,
      "loss": 4.2591,
      "step": 67584
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.888507845119412e-05,
      "loss": 4.2605,
      "step": 68096
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.887670888248733e-05,
      "loss": 4.2603,
      "step": 68608
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.886832293497681e-05,
      "loss": 4.2651,
      "step": 69120
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.885993698746629e-05,
      "loss": 4.262,
      "step": 69632
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.885155103995577e-05,
      "loss": 4.2549,
      "step": 70144
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.884318147124898e-05,
      "loss": 4.2565,
      "step": 70656
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.883479552373846e-05,
      "loss": 4.2531,
      "step": 71168
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.882640957622794e-05,
      "loss": 4.2487,
      "step": 71680
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.881802362871742e-05,
      "loss": 4.2411,
      "step": 72192
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8809654060010626e-05,
      "loss": 4.2485,
      "step": 72704
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8801268112500106e-05,
      "loss": 4.2396,
      "step": 73216
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8792882164989586e-05,
      "loss": 4.2279,
      "step": 73728
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.878449621747907e-05,
      "loss": 4.2444,
      "step": 74240
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.877611026996855e-05,
      "loss": 4.2365,
      "step": 74752
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.876774070126176e-05,
      "loss": 4.2419,
      "step": 75264
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.875935475375124e-05,
      "loss": 4.2289,
      "step": 75776
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.875096880624072e-05,
      "loss": 4.2292,
      "step": 76288
    },
    {
      "epoch": 0.03,
      "eval_loss": 4.196406364440918,
      "eval_runtime": 307.1617,
      "eval_samples_per_second": 1242.313,
      "eval_steps_per_second": 38.823,
      "step": 76320
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.87425828587302e-05,
      "loss": 4.2194,
      "step": 76800
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.873421329002341e-05,
      "loss": 4.2166,
      "step": 77312
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.872582734251289e-05,
      "loss": 4.2304,
      "step": 77824
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.871744139500237e-05,
      "loss": 4.2154,
      "step": 78336
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.870905544749185e-05,
      "loss": 4.2235,
      "step": 78848
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.870066949998133e-05,
      "loss": 4.208,
      "step": 79360
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.869229993127454e-05,
      "loss": 4.216,
      "step": 79872
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8683913983764027e-05,
      "loss": 4.2032,
      "step": 80384
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8675544415057236e-05,
      "loss": 4.2097,
      "step": 80896
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8667158467546716e-05,
      "loss": 4.2052,
      "step": 81408
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8658772520036196e-05,
      "loss": 4.214,
      "step": 81920
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8650386572525676e-05,
      "loss": 4.2099,
      "step": 82432
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8642000625015155e-05,
      "loss": 4.1929,
      "step": 82944
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8633614677504635e-05,
      "loss": 4.1917,
      "step": 83456
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.862522872999411e-05,
      "loss": 4.1884,
      "step": 83968
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.861684278248359e-05,
      "loss": 4.1866,
      "step": 84480
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8608473213776804e-05,
      "loss": 4.1889,
      "step": 84992
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.860008726626628e-05,
      "loss": 4.183,
      "step": 85504
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8591701318755764e-05,
      "loss": 4.183,
      "step": 86016
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8583315371245244e-05,
      "loss": 4.1996,
      "step": 86528
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.857494580253846e-05,
      "loss": 4.1848,
      "step": 87040
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.856655985502793e-05,
      "loss": 4.1844,
      "step": 87552
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.855817390751741e-05,
      "loss": 4.1786,
      "step": 88064
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.854978796000689e-05,
      "loss": 4.191,
      "step": 88576
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.854140201249637e-05,
      "loss": 4.1621,
      "step": 89088
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.853303244378958e-05,
      "loss": 4.1781,
      "step": 89600
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.852464649627906e-05,
      "loss": 4.1668,
      "step": 90112
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.851626054876854e-05,
      "loss": 4.1661,
      "step": 90624
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.850787460125802e-05,
      "loss": 4.1632,
      "step": 91136
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.849950503255123e-05,
      "loss": 4.1571,
      "step": 91648
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.849111908504072e-05,
      "loss": 4.1635,
      "step": 92160
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.84827331375302e-05,
      "loss": 4.1748,
      "step": 92672
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.847434719001968e-05,
      "loss": 4.1599,
      "step": 93184
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.846597762131289e-05,
      "loss": 4.1622,
      "step": 93696
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.845759167380237e-05,
      "loss": 4.1534,
      "step": 94208
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.844920572629185e-05,
      "loss": 4.159,
      "step": 94720
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.844081977878133e-05,
      "loss": 4.149,
      "step": 95232
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8432450210074536e-05,
      "loss": 4.1552,
      "step": 95744
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8424064262564016e-05,
      "loss": 4.137,
      "step": 96256
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8415678315053496e-05,
      "loss": 4.1465,
      "step": 96768
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8407292367542976e-05,
      "loss": 4.1427,
      "step": 97280
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8398922798836185e-05,
      "loss": 4.1574,
      "step": 97792
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.839053685132567e-05,
      "loss": 4.1308,
      "step": 98304
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.838215090381515e-05,
      "loss": 4.1421,
      "step": 98816
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.837376495630463e-05,
      "loss": 4.1417,
      "step": 99328
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.836539538759784e-05,
      "loss": 4.1322,
      "step": 99840
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.835700944008732e-05,
      "loss": 4.1445,
      "step": 100352
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.83486234925768e-05,
      "loss": 4.1185,
      "step": 100864
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.834023754506628e-05,
      "loss": 4.1267,
      "step": 101376
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.833186797635949e-05,
      "loss": 4.1391,
      "step": 101888
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.832348202884897e-05,
      "loss": 4.1314,
      "step": 102400
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.831509608133845e-05,
      "loss": 4.123,
      "step": 102912
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.830671013382793e-05,
      "loss": 4.1186,
      "step": 103424
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.829834056512114e-05,
      "loss": 4.1222,
      "step": 103936
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8289954617610625e-05,
      "loss": 4.1119,
      "step": 104448
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8281568670100105e-05,
      "loss": 4.1302,
      "step": 104960
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8273182722589585e-05,
      "loss": 4.105,
      "step": 105472
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8264813153882794e-05,
      "loss": 4.117,
      "step": 105984
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8256427206372274e-05,
      "loss": 4.1177,
      "step": 106496
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8248041258861754e-05,
      "loss": 4.111,
      "step": 107008
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8239655311351234e-05,
      "loss": 4.1191,
      "step": 107520
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.823128574264444e-05,
      "loss": 4.1109,
      "step": 108032
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.822289979513392e-05,
      "loss": 4.0973,
      "step": 108544
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.82145138476234e-05,
      "loss": 4.0967,
      "step": 109056
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.820612790011288e-05,
      "loss": 4.1218,
      "step": 109568
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.819775833140609e-05,
      "loss": 4.0991,
      "step": 110080
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.818937238389558e-05,
      "loss": 4.1,
      "step": 110592
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.818098643638506e-05,
      "loss": 4.0921,
      "step": 111104
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.817260048887454e-05,
      "loss": 4.0957,
      "step": 111616
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.816423092016775e-05,
      "loss": 4.1082,
      "step": 112128
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.815584497265723e-05,
      "loss": 4.1111,
      "step": 112640
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.814745902514671e-05,
      "loss": 4.0977,
      "step": 113152
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.813907307763619e-05,
      "loss": 4.106,
      "step": 113664
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.81307035089294e-05,
      "loss": 4.1112,
      "step": 114176
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.812231756141888e-05,
      "loss": 4.1066,
      "step": 114688
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.811393161390836e-05,
      "loss": 4.0856,
      "step": 115200
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.810554566639784e-05,
      "loss": 4.0979,
      "step": 115712
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8097176097691046e-05,
      "loss": 4.092,
      "step": 116224
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.808879015018053e-05,
      "loss": 4.0919,
      "step": 116736
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.808040420267001e-05,
      "loss": 4.0876,
      "step": 117248
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.807201825515949e-05,
      "loss": 4.0915,
      "step": 117760
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.80636486864527e-05,
      "loss": 4.0988,
      "step": 118272
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.805527911774591e-05,
      "loss": 4.0923,
      "step": 118784
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.804689317023539e-05,
      "loss": 4.0735,
      "step": 119296
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.803850722272487e-05,
      "loss": 4.0777,
      "step": 119808
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.803012127521435e-05,
      "loss": 4.0832,
      "step": 120320
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.802173532770383e-05,
      "loss": 4.0853,
      "step": 120832
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.801334938019331e-05,
      "loss": 4.0843,
      "step": 121344
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.800496343268279e-05,
      "loss": 4.0722,
      "step": 121856
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7996593863976e-05,
      "loss": 4.082,
      "step": 122368
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7988207916465486e-05,
      "loss": 4.0623,
      "step": 122880
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7979821968954966e-05,
      "loss": 4.0784,
      "step": 123392
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7971436021444446e-05,
      "loss": 4.0778,
      "step": 123904
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7963066452737655e-05,
      "loss": 4.0744,
      "step": 124416
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7954696884030864e-05,
      "loss": 4.0813,
      "step": 124928
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7946310936520344e-05,
      "loss": 4.0668,
      "step": 125440
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7937924989009824e-05,
      "loss": 4.068,
      "step": 125952
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7929539041499304e-05,
      "loss": 4.071,
      "step": 126464
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7921153093988784e-05,
      "loss": 4.0713,
      "step": 126976
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7912767146478264e-05,
      "loss": 4.0682,
      "step": 127488
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7904381198967744e-05,
      "loss": 4.0626,
      "step": 128000
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.789601163026095e-05,
      "loss": 4.0612,
      "step": 128512
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.788762568275043e-05,
      "loss": 4.0688,
      "step": 129024
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.787925611404365e-05,
      "loss": 4.0678,
      "step": 129536
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.787087016653313e-05,
      "loss": 4.0473,
      "step": 130048
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.786248421902261e-05,
      "loss": 4.0595,
      "step": 130560
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.785409827151209e-05,
      "loss": 4.0659,
      "step": 131072
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.784571232400157e-05,
      "loss": 4.0578,
      "step": 131584
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.783734275529478e-05,
      "loss": 4.0527,
      "step": 132096
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.782895680778426e-05,
      "loss": 4.0527,
      "step": 132608
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.782057086027374e-05,
      "loss": 4.045,
      "step": 133120
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.781218491276322e-05,
      "loss": 4.0681,
      "step": 133632
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.78037989652527e-05,
      "loss": 4.0491,
      "step": 134144
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.779541301774218e-05,
      "loss": 4.0554,
      "step": 134656
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.778702707023166e-05,
      "loss": 4.0529,
      "step": 135168
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.777864112272114e-05,
      "loss": 4.0629,
      "step": 135680
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.777028793281808e-05,
      "loss": 4.0491,
      "step": 136192
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.776190198530756e-05,
      "loss": 4.0528,
      "step": 136704
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.775351603779704e-05,
      "loss": 4.0592,
      "step": 137216
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.774513009028652e-05,
      "loss": 4.0503,
      "step": 137728
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7736744142776e-05,
      "loss": 4.0429,
      "step": 138240
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.772835819526548e-05,
      "loss": 4.0441,
      "step": 138752
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7719972247754956e-05,
      "loss": 4.0386,
      "step": 139264
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.771160267904817e-05,
      "loss": 4.0421,
      "step": 139776
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.770321673153765e-05,
      "loss": 4.0409,
      "step": 140288
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.769483078402713e-05,
      "loss": 4.0414,
      "step": 140800
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.768644483651661e-05,
      "loss": 4.0344,
      "step": 141312
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.767805888900609e-05,
      "loss": 4.0365,
      "step": 141824
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.766967294149557e-05,
      "loss": 4.0331,
      "step": 142336
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.766130337278878e-05,
      "loss": 4.0503,
      "step": 142848
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.765291742527826e-05,
      "loss": 4.0501,
      "step": 143360
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.764453147776774e-05,
      "loss": 4.0376,
      "step": 143872
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.763614553025722e-05,
      "loss": 4.0271,
      "step": 144384
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.76277595827467e-05,
      "loss": 4.0402,
      "step": 144896
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.761937363523618e-05,
      "loss": 4.0398,
      "step": 145408
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.761098768772566e-05,
      "loss": 4.0406,
      "step": 145920
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.760260174021514e-05,
      "loss": 4.0347,
      "step": 146432
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.759424855031208e-05,
      "loss": 4.0393,
      "step": 146944
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7585862602801565e-05,
      "loss": 4.0324,
      "step": 147456
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7577476655291045e-05,
      "loss": 4.0345,
      "step": 147968
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7569090707780525e-05,
      "loss": 4.0296,
      "step": 148480
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7560704760270005e-05,
      "loss": 4.0319,
      "step": 148992
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7552318812759485e-05,
      "loss": 4.0257,
      "step": 149504
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7543932865248965e-05,
      "loss": 4.0191,
      "step": 150016
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7535546917738445e-05,
      "loss": 4.0346,
      "step": 150528
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7527177349031654e-05,
      "loss": 4.0275,
      "step": 151040
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7518791401521134e-05,
      "loss": 4.0317,
      "step": 151552
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7510405454010614e-05,
      "loss": 4.0216,
      "step": 152064
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7502019506500094e-05,
      "loss": 4.0231,
      "step": 152576
    },
    {
      "epoch": 1.03,
      "eval_loss": 4.026560306549072,
      "eval_runtime": 307.2428,
      "eval_samples_per_second": 1241.985,
      "eval_steps_per_second": 38.813,
      "step": 152640
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7493633558989574e-05,
      "loss": 4.0154,
      "step": 153088
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7485247611479054e-05,
      "loss": 4.0137,
      "step": 153600
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7476861663968534e-05,
      "loss": 4.0273,
      "step": 154112
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7468475716458014e-05,
      "loss": 4.0172,
      "step": 154624
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7460089768947494e-05,
      "loss": 4.0273,
      "step": 155136
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7451703821436974e-05,
      "loss": 4.0092,
      "step": 155648
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7443317873926453e-05,
      "loss": 4.0198,
      "step": 156160
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7434931926415933e-05,
      "loss": 4.0071,
      "step": 156672
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.742656235770914e-05,
      "loss": 4.0183,
      "step": 157184
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.741817641019862e-05,
      "loss": 4.012,
      "step": 157696
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.74097904626881e-05,
      "loss": 4.0205,
      "step": 158208
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.740140451517758e-05,
      "loss": 4.0158,
      "step": 158720
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.739301856766706e-05,
      "loss": 4.0013,
      "step": 159232
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.738463262015654e-05,
      "loss": 4.007,
      "step": 159744
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.737626305144975e-05,
      "loss": 4.0019,
      "step": 160256
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.736787710393923e-05,
      "loss": 4.0026,
      "step": 160768
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.735949115642871e-05,
      "loss": 4.0028,
      "step": 161280
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.73511052089182e-05,
      "loss": 3.9984,
      "step": 161792
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.734271926140768e-05,
      "loss": 4.0,
      "step": 162304
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.733434969270089e-05,
      "loss": 4.02,
      "step": 162816
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.732596374519037e-05,
      "loss": 4.0052,
      "step": 163328
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.731757779767985e-05,
      "loss": 4.0036,
      "step": 163840
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.730919185016933e-05,
      "loss": 4.0017,
      "step": 164352
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.730080590265881e-05,
      "loss": 4.0088,
      "step": 164864
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7292436333952016e-05,
      "loss": 3.9851,
      "step": 165376
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7284050386441496e-05,
      "loss": 4.0005,
      "step": 165888
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7275664438930976e-05,
      "loss": 3.9973,
      "step": 166400
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7267278491420456e-05,
      "loss": 3.991,
      "step": 166912
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.7258892543909936e-05,
      "loss": 3.986,
      "step": 167424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7250506596399416e-05,
      "loss": 3.9848,
      "step": 167936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.724213702769263e-05,
      "loss": 3.9931,
      "step": 168448
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.723375108018211e-05,
      "loss": 4.0048,
      "step": 168960
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.722536513267159e-05,
      "loss": 3.9935,
      "step": 169472
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.721697918516107e-05,
      "loss": 3.9935,
      "step": 169984
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.720859323765055e-05,
      "loss": 3.9879,
      "step": 170496
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.720022366894376e-05,
      "loss": 3.9894,
      "step": 171008
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.719183772143324e-05,
      "loss": 3.9823,
      "step": 171520
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.718345177392272e-05,
      "loss": 3.9953,
      "step": 172032
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.71750658264122e-05,
      "loss": 3.9707,
      "step": 172544
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.716667987890168e-05,
      "loss": 3.9845,
      "step": 173056
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.715831031019489e-05,
      "loss": 3.9789,
      "step": 173568
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.714992436268437e-05,
      "loss": 3.9941,
      "step": 174080
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.714153841517385e-05,
      "loss": 3.9747,
      "step": 174592
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.713315246766333e-05,
      "loss": 3.9798,
      "step": 175104
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.712476652015281e-05,
      "loss": 3.984,
      "step": 175616
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7116396951446025e-05,
      "loss": 3.9761,
      "step": 176128
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7108011003935505e-05,
      "loss": 3.9842,
      "step": 176640
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.709962505642498e-05,
      "loss": 3.965,
      "step": 177152
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.709123910891446e-05,
      "loss": 3.9715,
      "step": 177664
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7082869540207674e-05,
      "loss": 3.9838,
      "step": 178176
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7074483592697154e-05,
      "loss": 3.9725,
      "step": 178688
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.706609764518663e-05,
      "loss": 3.9693,
      "step": 179200
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.705771169767611e-05,
      "loss": 3.9651,
      "step": 179712
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.704934212896932e-05,
      "loss": 3.9728,
      "step": 180224
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.70409561814588e-05,
      "loss": 3.9576,
      "step": 180736
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.703257023394828e-05,
      "loss": 3.9744,
      "step": 181248
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.702418428643776e-05,
      "loss": 3.9552,
      "step": 181760
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.701579833892724e-05,
      "loss": 3.9689,
      "step": 182272
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.700742877022045e-05,
      "loss": 3.9733,
      "step": 182784
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.699904282270993e-05,
      "loss": 3.9605,
      "step": 183296
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.699065687519941e-05,
      "loss": 3.9675,
      "step": 183808
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.698227092768889e-05,
      "loss": 3.9694,
      "step": 184320
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.697388498017837e-05,
      "loss": 3.9492,
      "step": 184832
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.696551541147158e-05,
      "loss": 3.9551,
      "step": 185344
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.695712946396106e-05,
      "loss": 3.9676,
      "step": 185856
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.694874351645054e-05,
      "loss": 3.9602,
      "step": 186368
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.694035756894003e-05,
      "loss": 3.957,
      "step": 186880
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.693198800023324e-05,
      "loss": 3.9462,
      "step": 187392
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.692360205272272e-05,
      "loss": 3.9501,
      "step": 187904
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.69152161052122e-05,
      "loss": 3.9675,
      "step": 188416
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6906830157701677e-05,
      "loss": 3.9677,
      "step": 188928
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6898444210191157e-05,
      "loss": 3.9534,
      "step": 189440
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6890058262680636e-05,
      "loss": 3.967,
      "step": 189952
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6881688693973846e-05,
      "loss": 3.9717,
      "step": 190464
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6873302746463326e-05,
      "loss": 3.9718,
      "step": 190976
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6864916798952806e-05,
      "loss": 3.9429,
      "step": 191488
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6856530851442285e-05,
      "loss": 3.9624,
      "step": 192000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6848161282735495e-05,
      "loss": 3.9544,
      "step": 192512
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.683977533522498e-05,
      "loss": 3.9505,
      "step": 193024
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.683138938771446e-05,
      "loss": 3.9549,
      "step": 193536
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.682300344020394e-05,
      "loss": 3.9554,
      "step": 194048
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.681463387149715e-05,
      "loss": 3.959,
      "step": 194560
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.680624792398663e-05,
      "loss": 3.961,
      "step": 195072
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.679786197647611e-05,
      "loss": 3.9374,
      "step": 195584
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.678947602896559e-05,
      "loss": 3.9456,
      "step": 196096
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.678109008145507e-05,
      "loss": 3.948,
      "step": 196608
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.677272051274828e-05,
      "loss": 3.9517,
      "step": 197120
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.676433456523776e-05,
      "loss": 3.9541,
      "step": 197632
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.675594861772724e-05,
      "loss": 3.939,
      "step": 198144
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.674756267021672e-05,
      "loss": 3.9522,
      "step": 198656
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6739193101509935e-05,
      "loss": 3.9324,
      "step": 199168
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6730807153999415e-05,
      "loss": 3.9476,
      "step": 199680
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6722421206488895e-05,
      "loss": 3.9458,
      "step": 200192
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6714035258978375e-05,
      "loss": 3.9449,
      "step": 200704
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6705649311467855e-05,
      "loss": 3.9521,
      "step": 201216
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6697279742761064e-05,
      "loss": 3.946,
      "step": 201728
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6688893795250544e-05,
      "loss": 3.9368,
      "step": 202240
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6680507847740024e-05,
      "loss": 3.9474,
      "step": 202752
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6672121900229504e-05,
      "loss": 3.9448,
      "step": 203264
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.666375233152271e-05,
      "loss": 3.9431,
      "step": 203776
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.665536638401219e-05,
      "loss": 3.9345,
      "step": 204288
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.664698043650167e-05,
      "loss": 3.9379,
      "step": 204800
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.663859448899115e-05,
      "loss": 3.9424,
      "step": 205312
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.663020854148063e-05,
      "loss": 3.9441,
      "step": 205824
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.662183897277385e-05,
      "loss": 3.9209,
      "step": 206336
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.661345302526333e-05,
      "loss": 3.9345,
      "step": 206848
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.660506707775281e-05,
      "loss": 3.9449,
      "step": 207360
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.659668113024229e-05,
      "loss": 3.9355,
      "step": 207872
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.658829518273176e-05,
      "loss": 3.9297,
      "step": 208384
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.657990923522124e-05,
      "loss": 3.9328,
      "step": 208896
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.657153966651446e-05,
      "loss": 3.9265,
      "step": 209408
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.656315371900393e-05,
      "loss": 3.9452,
      "step": 209920
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.655476777149341e-05,
      "loss": 3.9272,
      "step": 210432
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.654638182398289e-05,
      "loss": 3.9368,
      "step": 210944
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6538012255276106e-05,
      "loss": 3.9376,
      "step": 211456
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6529626307765586e-05,
      "loss": 3.9404,
      "step": 211968
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6521240360255066e-05,
      "loss": 3.9315,
      "step": 212480
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6512854412744546e-05,
      "loss": 3.9368,
      "step": 212992
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6504468465234026e-05,
      "loss": 3.9401,
      "step": 213504
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6496098896527235e-05,
      "loss": 3.9332,
      "step": 214016
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6487712949016715e-05,
      "loss": 3.922,
      "step": 214528
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6479327001506195e-05,
      "loss": 3.9263,
      "step": 215040
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6470941053995675e-05,
      "loss": 3.9222,
      "step": 215552
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6462571485288884e-05,
      "loss": 3.929,
      "step": 216064
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6454185537778364e-05,
      "loss": 3.9235,
      "step": 216576
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6445799590267844e-05,
      "loss": 3.9287,
      "step": 217088
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6437413642757324e-05,
      "loss": 3.9176,
      "step": 217600
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.642904407405054e-05,
      "loss": 3.9199,
      "step": 218112
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.642065812654002e-05,
      "loss": 3.9184,
      "step": 218624
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.64122721790295e-05,
      "loss": 3.937,
      "step": 219136
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.640388623151898e-05,
      "loss": 3.9365,
      "step": 219648
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.639551666281219e-05,
      "loss": 3.9285,
      "step": 220160
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.638713071530167e-05,
      "loss": 3.9141,
      "step": 220672
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.637874476779115e-05,
      "loss": 3.9269,
      "step": 221184
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.637035882028063e-05,
      "loss": 3.9302,
      "step": 221696
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.636198925157384e-05,
      "loss": 3.9266,
      "step": 222208
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.635360330406332e-05,
      "loss": 3.9218,
      "step": 222720
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.63452173565528e-05,
      "loss": 3.9286,
      "step": 223232
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.633683140904228e-05,
      "loss": 3.9224,
      "step": 223744
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6328461840335494e-05,
      "loss": 3.9283,
      "step": 224256
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6320075892824974e-05,
      "loss": 3.9147,
      "step": 224768
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6311689945314454e-05,
      "loss": 3.9247,
      "step": 225280
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6303303997803934e-05,
      "loss": 3.915,
      "step": 225792
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6294918050293413e-05,
      "loss": 3.907,
      "step": 226304
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6286532102782893e-05,
      "loss": 3.9241,
      "step": 226816
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.62781625340761e-05,
      "loss": 3.9227,
      "step": 227328
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.626977658656558e-05,
      "loss": 3.9193,
      "step": 227840
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.626139063905506e-05,
      "loss": 3.9166,
      "step": 228352
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.625300469154454e-05,
      "loss": 3.9118,
      "step": 228864
    },
    {
      "epoch": 0.03,
      "eval_loss": 3.9520654678344727,
      "eval_runtime": 302.4622,
      "eval_samples_per_second": 1261.615,
      "eval_steps_per_second": 39.426,
      "step": 228960
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.624461874403402e-05,
      "loss": 3.9135,
      "step": 229376
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.62362327965235e-05,
      "loss": 3.9081,
      "step": 229888
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.622784684901298e-05,
      "loss": 3.9207,
      "step": 230400
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.621946090150246e-05,
      "loss": 3.9083,
      "step": 230912
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.621107495399194e-05,
      "loss": 3.9281,
      "step": 231424
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.620268900648142e-05,
      "loss": 3.9004,
      "step": 231936
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.61943030589709e-05,
      "loss": 3.9174,
      "step": 232448
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.618591711146038e-05,
      "loss": 3.9012,
      "step": 232960
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.617754754275359e-05,
      "loss": 3.916,
      "step": 233472
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.616916159524307e-05,
      "loss": 3.9102,
      "step": 233984
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.616077564773255e-05,
      "loss": 3.9076,
      "step": 234496
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.615238970022203e-05,
      "loss": 3.9166,
      "step": 235008
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.614402013151524e-05,
      "loss": 3.8959,
      "step": 235520
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.613563418400472e-05,
      "loss": 3.9045,
      "step": 236032
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.61272482364942e-05,
      "loss": 3.9004,
      "step": 236544
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.611886228898368e-05,
      "loss": 3.9011,
      "step": 237056
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6110492720276896e-05,
      "loss": 3.9022,
      "step": 237568
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6102106772766376e-05,
      "loss": 3.8991,
      "step": 238080
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6093720825255856e-05,
      "loss": 3.8985,
      "step": 238592
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6085334877745336e-05,
      "loss": 3.9197,
      "step": 239104
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6076965309038545e-05,
      "loss": 3.9037,
      "step": 239616
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6068579361528025e-05,
      "loss": 3.9053,
      "step": 240128
    },
    {
      "epoch": 0.0,
| "learning_rate": 4.6060193414017505e-05, | |
| "loss": 3.9047, | |
| "step": 240640 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.6051807466506985e-05, | |
| "loss": 3.9069, | |
| "step": 241152 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.6043437897800194e-05, | |
| "loss": 3.8877, | |
| "step": 241664 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.6035051950289674e-05, | |
| "loss": 3.9098, | |
| "step": 242176 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.6026666002779154e-05, | |
| "loss": 3.8936, | |
| "step": 242688 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.6018280055268634e-05, | |
| "loss": 3.894, | |
| "step": 243200 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.6009894107758114e-05, | |
| "loss": 3.8855, | |
| "step": 243712 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.600152453905133e-05, | |
| "loss": 3.8943, | |
| "step": 244224 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.599313859154081e-05, | |
| "loss": 3.8946, | |
| "step": 244736 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.598475264403029e-05, | |
| "loss": 3.9092, | |
| "step": 245248 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.59763830753235e-05, | |
| "loss": 3.901, | |
| "step": 245760 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.596799712781298e-05, | |
| "loss": 3.8959, | |
| "step": 246272 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.595961118030246e-05, | |
| "loss": 3.8881, | |
| "step": 246784 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.595122523279194e-05, | |
| "loss": 3.8977, | |
| "step": 247296 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.594283928528142e-05, | |
| "loss": 3.8845, | |
| "step": 247808 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.593446971657463e-05, | |
| "loss": 3.902, | |
| "step": 248320 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.592608376906411e-05, | |
| "loss": 3.8762, | |
| "step": 248832 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.591769782155359e-05, | |
| "loss": 3.8878, | |
| "step": 249344 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.590931187404307e-05, | |
| "loss": 3.8873, | |
| "step": 249856 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.590094230533628e-05, | |
| "loss": 3.9013, | |
| "step": 250368 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.589255635782576e-05, | |
| "loss": 3.8845, | |
| "step": 250880 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.588417041031524e-05, | |
| "loss": 3.8863, | |
| "step": 251392 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.587578446280472e-05, | |
| "loss": 3.8902, | |
| "step": 251904 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.586741489409793e-05, | |
| "loss": 3.8868, | |
| "step": 252416 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.585902894658741e-05, | |
| "loss": 3.8919, | |
| "step": 252928 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.585064299907689e-05, | |
| "loss": 3.8773, | |
| "step": 253440 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.584225705156637e-05, | |
| "loss": 3.8737, | |
| "step": 253952 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.583388748285958e-05, | |
| "loss": 3.8914, | |
| "step": 254464 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.582550153534906e-05, | |
| "loss": 3.8808, | |
| "step": 254976 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.581711558783854e-05, | |
| "loss": 3.8794, | |
| "step": 255488 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.580872964032802e-05, | |
| "loss": 3.8765, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.580036007162124e-05, | |
| "loss": 3.8807, | |
| "step": 256512 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.579197412411072e-05, | |
| "loss": 3.8659, | |
| "step": 257024 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.57835881766002e-05, | |
| "loss": 3.8854, | |
| "step": 257536 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.577520222908968e-05, | |
| "loss": 3.8655, | |
| "step": 258048 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5766832660382886e-05, | |
| "loss": 3.882, | |
| "step": 258560 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5758446712872366e-05, | |
| "loss": 3.8781, | |
| "step": 259072 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5750060765361846e-05, | |
| "loss": 3.8808, | |
| "step": 259584 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5741674817851326e-05, | |
| "loss": 3.8724, | |
| "step": 260096 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5733305249144535e-05, | |
| "loss": 3.8828, | |
| "step": 260608 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5724919301634015e-05, | |
| "loss": 3.8602, | |
| "step": 261120 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5716533354123495e-05, | |
| "loss": 3.8707, | |
| "step": 261632 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5708147406612975e-05, | |
| "loss": 3.8792, | |
| "step": 262144 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.569977783790619e-05, | |
| "loss": 3.8757, | |
| "step": 262656 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.569139189039567e-05, | |
| "loss": 3.866, | |
| "step": 263168 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.568300594288515e-05, | |
| "loss": 3.866, | |
| "step": 263680 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.567461999537463e-05, | |
| "loss": 3.8647, | |
| "step": 264192 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.566625042666784e-05, | |
| "loss": 3.8754, | |
| "step": 264704 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.565786447915732e-05, | |
| "loss": 3.8821, | |
| "step": 265216 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.56494785316468e-05, | |
| "loss": 3.8702, | |
| "step": 265728 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.564109258413628e-05, | |
| "loss": 3.8829, | |
| "step": 266240 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.563272301542949e-05, | |
| "loss": 3.8851, | |
| "step": 266752 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.562433706791897e-05, | |
| "loss": 3.8899, | |
| "step": 267264 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.561595112040845e-05, | |
| "loss": 3.856, | |
| "step": 267776 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.560756517289793e-05, | |
| "loss": 3.8733, | |
| "step": 268288 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5599195604191144e-05, | |
| "loss": 3.8761, | |
| "step": 268800 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5590809656680624e-05, | |
| "loss": 3.8622, | |
| "step": 269312 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5582423709170104e-05, | |
| "loss": 3.8716, | |
| "step": 269824 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5574037761659584e-05, | |
| "loss": 3.8754, | |
| "step": 270336 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.556566819295279e-05, | |
| "loss": 3.8768, | |
| "step": 270848 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.555728224544227e-05, | |
| "loss": 3.8689, | |
| "step": 271360 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.554889629793175e-05, | |
| "loss": 3.8593, | |
| "step": 271872 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.554051035042123e-05, | |
| "loss": 3.8588, | |
| "step": 272384 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.553214078171444e-05, | |
| "loss": 3.8636, | |
| "step": 272896 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.552375483420392e-05, | |
| "loss": 3.8712, | |
| "step": 273408 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.55153688866934e-05, | |
| "loss": 3.8725, | |
| "step": 273920 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.550698293918288e-05, | |
| "loss": 3.8556, | |
| "step": 274432 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.54986133704761e-05, | |
| "loss": 3.8677, | |
| "step": 274944 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.549022742296558e-05, | |
| "loss": 3.8528, | |
| "step": 275456 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.548184147545506e-05, | |
| "loss": 3.861, | |
| "step": 275968 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.547345552794454e-05, | |
| "loss": 3.8664, | |
| "step": 276480 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.546508595923775e-05, | |
| "loss": 3.8607, | |
| "step": 276992 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.545670001172723e-05, | |
| "loss": 3.8745, | |
| "step": 277504 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.544831406421671e-05, | |
| "loss": 3.8642, | |
| "step": 278016 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.543992811670619e-05, | |
| "loss": 3.8578, | |
| "step": 278528 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5431558547999396e-05, | |
| "loss": 3.8667, | |
| "step": 279040 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5423172600488876e-05, | |
| "loss": 3.8649, | |
| "step": 279552 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5414786652978356e-05, | |
| "loss": 3.8657, | |
| "step": 280064 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5406400705467836e-05, | |
| "loss": 3.8523, | |
| "step": 280576 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.539803113676105e-05, | |
| "loss": 3.8586, | |
| "step": 281088 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.538964518925053e-05, | |
| "loss": 3.8633, | |
| "step": 281600 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.538125924174001e-05, | |
| "loss": 3.8678, | |
| "step": 282112 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.537287329422949e-05, | |
| "loss": 3.8462, | |
| "step": 282624 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.53645037255227e-05, | |
| "loss": 3.8505, | |
| "step": 283136 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.535611777801218e-05, | |
| "loss": 3.8662, | |
| "step": 283648 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.534773183050166e-05, | |
| "loss": 3.8594, | |
| "step": 284160 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.533934588299114e-05, | |
| "loss": 3.8501, | |
| "step": 284672 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.533097631428435e-05, | |
| "loss": 3.8509, | |
| "step": 285184 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.532259036677383e-05, | |
| "loss": 3.8546, | |
| "step": 285696 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.531420441926331e-05, | |
| "loss": 3.8612, | |
| "step": 286208 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.530581847175279e-05, | |
| "loss": 3.8525, | |
| "step": 286720 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5297448903046005e-05, | |
| "loss": 3.8546, | |
| "step": 287232 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5289062955535485e-05, | |
| "loss": 3.8618, | |
| "step": 287744 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5280677008024965e-05, | |
| "loss": 3.864, | |
| "step": 288256 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5272291060514445e-05, | |
| "loss": 3.8623, | |
| "step": 288768 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5263921491807654e-05, | |
| "loss": 3.8534, | |
| "step": 289280 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5255535544297134e-05, | |
| "loss": 3.8674, | |
| "step": 289792 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5247149596786614e-05, | |
| "loss": 3.857, | |
| "step": 290304 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.523876364927609e-05, | |
| "loss": 3.8436, | |
| "step": 290816 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.52303940805693e-05, | |
| "loss": 3.8517, | |
| "step": 291328 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.522200813305878e-05, | |
| "loss": 3.8475, | |
| "step": 291840 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.521362218554826e-05, | |
| "loss": 3.8522, | |
| "step": 292352 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.520523623803774e-05, | |
| "loss": 3.8451, | |
| "step": 292864 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.519686666933096e-05, | |
| "loss": 3.8555, | |
| "step": 293376 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.518848072182044e-05, | |
| "loss": 3.8434, | |
| "step": 293888 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.518009477430992e-05, | |
| "loss": 3.8443, | |
| "step": 294400 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.517170882679939e-05, | |
| "loss": 3.8441, | |
| "step": 294912 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.516333925809261e-05, | |
| "loss": 3.8625, | |
| "step": 295424 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.515495331058209e-05, | |
| "loss": 3.8551, | |
| "step": 295936 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.514656736307156e-05, | |
| "loss": 3.8551, | |
| "step": 296448 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.513818141556104e-05, | |
| "loss": 3.8395, | |
| "step": 296960 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.512981184685426e-05, | |
| "loss": 3.8555, | |
| "step": 297472 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.512142589934374e-05, | |
| "loss": 3.8583, | |
| "step": 297984 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.511303995183322e-05, | |
| "loss": 3.8492, | |
| "step": 298496 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5104654004322697e-05, | |
| "loss": 3.8474, | |
| "step": 299008 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.509628443561591e-05, | |
| "loss": 3.8555, | |
| "step": 299520 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.508789848810539e-05, | |
| "loss": 3.8486, | |
| "step": 300032 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5079512540594866e-05, | |
| "loss": 3.8565, | |
| "step": 300544 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5071126593084346e-05, | |
| "loss": 3.8396, | |
| "step": 301056 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.506275702437756e-05, | |
| "loss": 3.8532, | |
| "step": 301568 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5054371076867035e-05, | |
| "loss": 3.8418, | |
| "step": 302080 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5045985129356515e-05, | |
| "loss": 3.8393, | |
| "step": 302592 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5037599181845995e-05, | |
| "loss": 3.8485, | |
| "step": 303104 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.502922961313921e-05, | |
| "loss": 3.8532, | |
| "step": 303616 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5020843665628684e-05, | |
| "loss": 3.8467, | |
| "step": 304128 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.501245771811817e-05, | |
| "loss": 3.8399, | |
| "step": 304640 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.500407177060765e-05, | |
| "loss": 3.8436, | |
| "step": 305152 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 3.9110300540924072, | |
| "eval_runtime": 305.4762, | |
| "eval_samples_per_second": 1249.168, | |
| "eval_steps_per_second": 39.037, | |
| "step": 305280 | |
| } | |
| ], | |
| "logging_steps": 512, | |
| "max_steps": 3052726, | |
| "num_train_epochs": 9223372036854775807, | |
| "save_steps": 10, | |
| "total_flos": 2.1253318546540954e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |