| { | |
| "best_metric": 4.159285068511963, | |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/existential-there-quantifier/lstm/3/checkpoints/checkpoint-457920", | |
| "epoch": 1.0250006060157382, | |
| "eval_steps": 10, | |
| "global_step": 457920, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.999998362119627e-05, | |
| "loss": 10.8215, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.999161405248948e-05, | |
| "loss": 7.5553, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.998322810497896e-05, | |
| "loss": 7.06, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.997484215746844e-05, | |
| "loss": 6.9993, | |
| "step": 1536 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.996645620995792e-05, | |
| "loss": 6.9452, | |
| "step": 2048 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.99580702624474e-05, | |
| "loss": 6.887, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.994968431493688e-05, | |
| "loss": 6.7272, | |
| "step": 3072 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.994129836742636e-05, | |
| "loss": 6.623, | |
| "step": 3584 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.993291241991584e-05, | |
| "loss": 6.5272, | |
| "step": 4096 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.992452647240532e-05, | |
| "loss": 6.4534, | |
| "step": 4608 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.99161405248948e-05, | |
| "loss": 6.3851, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.990775457738428e-05, | |
| "loss": 6.3195, | |
| "step": 5632 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.989936862987376e-05, | |
| "loss": 6.2438, | |
| "step": 6144 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.989098268236324e-05, | |
| "loss": 6.1818, | |
| "step": 6656 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.988259673485272e-05, | |
| "loss": 6.1253, | |
| "step": 7168 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.98742107873422e-05, | |
| "loss": 6.0748, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.986582483983168e-05, | |
| "loss": 6.0306, | |
| "step": 8192 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.985743889232116e-05, | |
| "loss": 5.9982, | |
| "step": 8704 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.984905294481064e-05, | |
| "loss": 5.9498, | |
| "step": 9216 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.984068337610385e-05, | |
| "loss": 5.9156, | |
| "step": 9728 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.983229742859333e-05, | |
| "loss": 5.8805, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.982391148108281e-05, | |
| "loss": 5.8319, | |
| "step": 10752 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.981552553357229e-05, | |
| "loss": 5.8005, | |
| "step": 11264 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9807155964865506e-05, | |
| "loss": 5.7676, | |
| "step": 11776 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9798770017354986e-05, | |
| "loss": 5.7518, | |
| "step": 12288 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9790384069844466e-05, | |
| "loss": 5.7142, | |
| "step": 12800 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9781998122333946e-05, | |
| "loss": 5.6862, | |
| "step": 13312 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9773612174823426e-05, | |
| "loss": 5.667, | |
| "step": 13824 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.97652262273129e-05, | |
| "loss": 5.6411, | |
| "step": 14336 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.975684027980238e-05, | |
| "loss": 5.6139, | |
| "step": 14848 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.974845433229186e-05, | |
| "loss": 5.5904, | |
| "step": 15360 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.974008476358507e-05, | |
| "loss": 5.5832, | |
| "step": 15872 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.973169881607455e-05, | |
| "loss": 5.5618, | |
| "step": 16384 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.972331286856403e-05, | |
| "loss": 5.5511, | |
| "step": 16896 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9714926921053515e-05, | |
| "loss": 5.5301, | |
| "step": 17408 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9706557352346724e-05, | |
| "loss": 5.5108, | |
| "step": 17920 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9698171404836204e-05, | |
| "loss": 5.5063, | |
| "step": 18432 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9689785457325684e-05, | |
| "loss": 5.4668, | |
| "step": 18944 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9681399509815164e-05, | |
| "loss": 5.4575, | |
| "step": 19456 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9673013562304644e-05, | |
| "loss": 5.44, | |
| "step": 19968 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.966464399359785e-05, | |
| "loss": 5.4334, | |
| "step": 20480 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.965625804608733e-05, | |
| "loss": 5.4098, | |
| "step": 20992 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.964787209857681e-05, | |
| "loss": 5.4172, | |
| "step": 21504 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.963948615106629e-05, | |
| "loss": 5.3823, | |
| "step": 22016 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.96311165823595e-05, | |
| "loss": 5.3822, | |
| "step": 22528 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.962273063484898e-05, | |
| "loss": 5.3764, | |
| "step": 23040 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.961434468733847e-05, | |
| "loss": 5.3677, | |
| "step": 23552 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.960595873982795e-05, | |
| "loss": 5.3599, | |
| "step": 24064 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.959757279231743e-05, | |
| "loss": 5.3376, | |
| "step": 24576 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.958920322361064e-05, | |
| "loss": 5.3161, | |
| "step": 25088 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.958081727610012e-05, | |
| "loss": 5.3305, | |
| "step": 25600 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.95724313285896e-05, | |
| "loss": 5.3098, | |
| "step": 26112 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.956406175988281e-05, | |
| "loss": 5.2965, | |
| "step": 26624 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.955567581237229e-05, | |
| "loss": 5.2795, | |
| "step": 27136 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.954728986486177e-05, | |
| "loss": 5.2859, | |
| "step": 27648 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.953890391735125e-05, | |
| "loss": 5.2659, | |
| "step": 28160 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9530517969840727e-05, | |
| "loss": 5.28, | |
| "step": 28672 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9522132022330207e-05, | |
| "loss": 5.2493, | |
| "step": 29184 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9513746074819686e-05, | |
| "loss": 5.2411, | |
| "step": 29696 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9505360127309166e-05, | |
| "loss": 5.2422, | |
| "step": 30208 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9496974179798646e-05, | |
| "loss": 5.2266, | |
| "step": 30720 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.948858823228813e-05, | |
| "loss": 5.2044, | |
| "step": 31232 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.948021866358134e-05, | |
| "loss": 5.2103, | |
| "step": 31744 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.947183271607082e-05, | |
| "loss": 5.1871, | |
| "step": 32256 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.94634467685603e-05, | |
| "loss": 5.1888, | |
| "step": 32768 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.945506082104978e-05, | |
| "loss": 5.1996, | |
| "step": 33280 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9446674873539255e-05, | |
| "loss": 5.1912, | |
| "step": 33792 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9438288926028735e-05, | |
| "loss": 5.172, | |
| "step": 34304 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9429902978518215e-05, | |
| "loss": 5.1534, | |
| "step": 34816 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9421517031007695e-05, | |
| "loss": 5.1481, | |
| "step": 35328 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9413131083497175e-05, | |
| "loss": 5.1584, | |
| "step": 35840 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.940477789359412e-05, | |
| "loss": 5.1469, | |
| "step": 36352 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.93963919460836e-05, | |
| "loss": 5.1432, | |
| "step": 36864 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.938800599857309e-05, | |
| "loss": 5.1413, | |
| "step": 37376 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.937962005106256e-05, | |
| "loss": 5.1408, | |
| "step": 37888 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.937123410355204e-05, | |
| "loss": 5.1315, | |
| "step": 38400 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.936284815604152e-05, | |
| "loss": 5.1038, | |
| "step": 38912 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9354462208531e-05, | |
| "loss": 5.1086, | |
| "step": 39424 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.934607626102048e-05, | |
| "loss": 5.1027, | |
| "step": 39936 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.933769031350996e-05, | |
| "loss": 5.0883, | |
| "step": 40448 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.932932074480317e-05, | |
| "loss": 5.1068, | |
| "step": 40960 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.932093479729265e-05, | |
| "loss": 5.0859, | |
| "step": 41472 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.931254884978213e-05, | |
| "loss": 5.0922, | |
| "step": 41984 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.930416290227161e-05, | |
| "loss": 5.0771, | |
| "step": 42496 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9295793333564825e-05, | |
| "loss": 5.0525, | |
| "step": 43008 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9287407386054305e-05, | |
| "loss": 5.0583, | |
| "step": 43520 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9279037817347514e-05, | |
| "loss": 5.0611, | |
| "step": 44032 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9270651869836994e-05, | |
| "loss": 5.0563, | |
| "step": 44544 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9262265922326474e-05, | |
| "loss": 5.0505, | |
| "step": 45056 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9253879974815954e-05, | |
| "loss": 5.0374, | |
| "step": 45568 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.924551040610916e-05, | |
| "loss": 5.0355, | |
| "step": 46080 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.923712445859864e-05, | |
| "loss": 5.0247, | |
| "step": 46592 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.922873851108812e-05, | |
| "loss": 5.0171, | |
| "step": 47104 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.92203525635776e-05, | |
| "loss": 5.0155, | |
| "step": 47616 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.921196661606708e-05, | |
| "loss": 5.008, | |
| "step": 48128 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.920358066855656e-05, | |
| "loss": 5.0159, | |
| "step": 48640 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.919519472104604e-05, | |
| "loss": 4.9988, | |
| "step": 49152 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.918680877353552e-05, | |
| "loss": 4.9903, | |
| "step": 49664 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.917842282602501e-05, | |
| "loss": 4.9836, | |
| "step": 50176 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.917003687851449e-05, | |
| "loss": 4.9863, | |
| "step": 50688 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.91616673098077e-05, | |
| "loss": 4.9844, | |
| "step": 51200 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.915328136229718e-05, | |
| "loss": 4.972, | |
| "step": 51712 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.914489541478666e-05, | |
| "loss": 4.9655, | |
| "step": 52224 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.913650946727614e-05, | |
| "loss": 4.9507, | |
| "step": 52736 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.912812351976562e-05, | |
| "loss": 4.9588, | |
| "step": 53248 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.911975395105883e-05, | |
| "loss": 4.9455, | |
| "step": 53760 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.911136800354831e-05, | |
| "loss": 4.9402, | |
| "step": 54272 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9102998434841516e-05, | |
| "loss": 4.942, | |
| "step": 54784 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9094612487330996e-05, | |
| "loss": 4.9412, | |
| "step": 55296 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9086226539820476e-05, | |
| "loss": 4.934, | |
| "step": 55808 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.907784059230996e-05, | |
| "loss": 4.9307, | |
| "step": 56320 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.906945464479944e-05, | |
| "loss": 4.9209, | |
| "step": 56832 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.906106869728892e-05, | |
| "loss": 4.9214, | |
| "step": 57344 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9052682749778396e-05, | |
| "loss": 4.9111, | |
| "step": 57856 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9044296802267876e-05, | |
| "loss": 4.9058, | |
| "step": 58368 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9035910854757356e-05, | |
| "loss": 4.9145, | |
| "step": 58880 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9027541286050565e-05, | |
| "loss": 4.9045, | |
| "step": 59392 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.901917171734378e-05, | |
| "loss": 4.9033, | |
| "step": 59904 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.901078576983326e-05, | |
| "loss": 4.8934, | |
| "step": 60416 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.900239982232274e-05, | |
| "loss": 4.9003, | |
| "step": 60928 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8994013874812214e-05, | |
| "loss": 4.8959, | |
| "step": 61440 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.89856279273017e-05, | |
| "loss": 4.8768, | |
| "step": 61952 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.897725835859491e-05, | |
| "loss": 4.8927, | |
| "step": 62464 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8968888789888125e-05, | |
| "loss": 4.8795, | |
| "step": 62976 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8960502842377605e-05, | |
| "loss": 4.8807, | |
| "step": 63488 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8952116894867085e-05, | |
| "loss": 4.8594, | |
| "step": 64000 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8943730947356565e-05, | |
| "loss": 4.8535, | |
| "step": 64512 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.893534499984604e-05, | |
| "loss": 4.8489, | |
| "step": 65024 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.892695905233552e-05, | |
| "loss": 4.8533, | |
| "step": 65536 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8918573104825e-05, | |
| "loss": 4.8427, | |
| "step": 66048 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.891018715731448e-05, | |
| "loss": 4.8606, | |
| "step": 66560 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.890180120980396e-05, | |
| "loss": 4.8555, | |
| "step": 67072 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.889341526229344e-05, | |
| "loss": 4.8375, | |
| "step": 67584 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.888502931478292e-05, | |
| "loss": 4.8421, | |
| "step": 68096 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.88766433672724e-05, | |
| "loss": 4.8377, | |
| "step": 68608 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.886825741976188e-05, | |
| "loss": 4.8377, | |
| "step": 69120 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8859871472251365e-05, | |
| "loss": 4.8376, | |
| "step": 69632 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8851501903544574e-05, | |
| "loss": 4.8381, | |
| "step": 70144 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.884313233483778e-05, | |
| "loss": 4.8197, | |
| "step": 70656 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.883474638732726e-05, | |
| "loss": 4.8222, | |
| "step": 71168 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.882636043981674e-05, | |
| "loss": 4.8165, | |
| "step": 71680 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.881797449230622e-05, | |
| "loss": 4.8068, | |
| "step": 72192 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.88095885447957e-05, | |
| "loss": 4.8206, | |
| "step": 72704 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.880120259728518e-05, | |
| "loss": 4.8058, | |
| "step": 73216 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.879281664977466e-05, | |
| "loss": 4.7958, | |
| "step": 73728 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.878444708106787e-05, | |
| "loss": 4.8108, | |
| "step": 74240 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.877606113355735e-05, | |
| "loss": 4.794, | |
| "step": 74752 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.876767518604683e-05, | |
| "loss": 4.7911, | |
| "step": 75264 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.875928923853632e-05, | |
| "loss": 4.7942, | |
| "step": 75776 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.87509032910258e-05, | |
| "loss": 4.787, | |
| "step": 76288 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 4.749973297119141, | |
| "eval_runtime": 308.6341, | |
| "eval_samples_per_second": 1236.386, | |
| "eval_steps_per_second": 38.638, | |
| "step": 76320 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.874251734351528e-05, | |
| "loss": 4.773, | |
| "step": 76800 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.873413139600475e-05, | |
| "loss": 4.7833, | |
| "step": 77312 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.872574544849423e-05, | |
| "loss": 4.7971, | |
| "step": 77824 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.871735950098371e-05, | |
| "loss": 4.7722, | |
| "step": 78336 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.870897355347319e-05, | |
| "loss": 4.7864, | |
| "step": 78848 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.870058760596267e-05, | |
| "loss": 4.7584, | |
| "step": 79360 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.869221803725588e-05, | |
| "loss": 4.7752, | |
| "step": 79872 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8683848468549096e-05, | |
| "loss": 4.7521, | |
| "step": 80384 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8675462521038576e-05, | |
| "loss": 4.7619, | |
| "step": 80896 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8667076573528056e-05, | |
| "loss": 4.7614, | |
| "step": 81408 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8658690626017536e-05, | |
| "loss": 4.7653, | |
| "step": 81920 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8650304678507016e-05, | |
| "loss": 4.7597, | |
| "step": 82432 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8641935109800225e-05, | |
| "loss": 4.7424, | |
| "step": 82944 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8633549162289705e-05, | |
| "loss": 4.7391, | |
| "step": 83456 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.862517959358292e-05, | |
| "loss": 4.7341, | |
| "step": 83968 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.86167936460724e-05, | |
| "loss": 4.7398, | |
| "step": 84480 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8608407698561874e-05, | |
| "loss": 4.7517, | |
| "step": 84992 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8600021751051354e-05, | |
| "loss": 4.7337, | |
| "step": 85504 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8591635803540834e-05, | |
| "loss": 4.7408, | |
| "step": 86016 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8583249856030314e-05, | |
| "loss": 4.7521, | |
| "step": 86528 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8574863908519794e-05, | |
| "loss": 4.727, | |
| "step": 87040 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8566477961009274e-05, | |
| "loss": 4.7239, | |
| "step": 87552 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8558092013498754e-05, | |
| "loss": 4.7244, | |
| "step": 88064 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.854970606598824e-05, | |
| "loss": 4.7358, | |
| "step": 88576 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.854132011847772e-05, | |
| "loss": 4.7148, | |
| "step": 89088 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.853295054977093e-05, | |
| "loss": 4.7168, | |
| "step": 89600 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.852456460226041e-05, | |
| "loss": 4.7152, | |
| "step": 90112 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.851617865474989e-05, | |
| "loss": 4.7141, | |
| "step": 90624 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.850779270723937e-05, | |
| "loss": 4.6959, | |
| "step": 91136 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.849942313853258e-05, | |
| "loss": 4.7005, | |
| "step": 91648 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.849103719102206e-05, | |
| "loss": 4.6971, | |
| "step": 92160 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.848265124351154e-05, | |
| "loss": 4.7056, | |
| "step": 92672 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.847426529600102e-05, | |
| "loss": 4.7102, | |
| "step": 93184 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.84658793484905e-05, | |
| "loss": 4.6944, | |
| "step": 93696 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.845749340097998e-05, | |
| "loss": 4.6919, | |
| "step": 94208 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.844910745346946e-05, | |
| "loss": 4.7031, | |
| "step": 94720 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.844072150595894e-05, | |
| "loss": 4.6767, | |
| "step": 95232 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.843233555844842e-05, | |
| "loss": 4.6836, | |
| "step": 95744 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8423965989741634e-05, | |
| "loss": 4.6719, | |
| "step": 96256 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8415580042231114e-05, | |
| "loss": 4.6799, | |
| "step": 96768 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.840719409472059e-05, | |
| "loss": 4.6702, | |
| "step": 97280 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.83988245260138e-05, | |
| "loss": 4.6856, | |
| "step": 97792 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.839043857850328e-05, | |
| "loss": 4.6668, | |
| "step": 98304 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.838205263099276e-05, | |
| "loss": 4.6745, | |
| "step": 98816 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8373666683482236e-05, | |
| "loss": 4.6671, | |
| "step": 99328 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8365280735971716e-05, | |
| "loss": 4.6726, | |
| "step": 99840 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8356894788461196e-05, | |
| "loss": 4.6692, | |
| "step": 100352 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8348508840950676e-05, | |
| "loss": 4.6585, | |
| "step": 100864 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.834013927224389e-05, | |
| "loss": 4.6463, | |
| "step": 101376 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.833175332473337e-05, | |
| "loss": 4.6697, | |
| "step": 101888 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.832336737722285e-05, | |
| "loss": 4.6572, | |
| "step": 102400 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.831498142971233e-05, | |
| "loss": 4.6495, | |
| "step": 102912 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.830659548220181e-05, | |
| "loss": 4.6443, | |
| "step": 103424 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.829820953469129e-05, | |
| "loss": 4.6514, | |
| "step": 103936 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.828982358718077e-05, | |
| "loss": 4.6403, | |
| "step": 104448 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.828143763967025e-05, | |
| "loss": 4.6595, | |
| "step": 104960 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.827305169215973e-05, | |
| "loss": 4.6405, | |
| "step": 105472 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.826468212345294e-05, | |
| "loss": 4.6373, | |
| "step": 105984 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.825629617594242e-05, | |
| "loss": 4.6466, | |
| "step": 106496 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.82479102284319e-05, | |
| "loss": 4.6268, | |
| "step": 107008 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.823952428092138e-05, | |
| "loss": 4.6222, | |
| "step": 107520 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.823113833341086e-05, | |
| "loss": 4.6336, | |
| "step": 108032 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8222768764704077e-05, | |
| "loss": 4.6131, | |
| "step": 108544 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8214382817193557e-05, | |
| "loss": 4.6217, | |
| "step": 109056 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8205996869683037e-05, | |
| "loss": 4.6315, | |
| "step": 109568 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8197610922172516e-05, | |
| "loss": 4.6349, | |
| "step": 110080 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8189224974661996e-05, | |
| "loss": 4.6215, | |
| "step": 110592 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8180839027151476e-05, | |
| "loss": 4.6132, | |
| "step": 111104 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.817245307964095e-05, | |
| "loss": 4.6066, | |
| "step": 111616 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8164083510934165e-05, | |
| "loss": 4.6256, | |
| "step": 112128 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8155697563423645e-05, | |
| "loss": 4.6192, | |
| "step": 112640 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8147311615913125e-05, | |
| "loss": 4.6198, | |
| "step": 113152 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8138942047206334e-05, | |
| "loss": 4.6181, | |
| "step": 113664 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8130556099695814e-05, | |
| "loss": 4.6265, | |
| "step": 114176 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8122170152185294e-05, | |
| "loss": 4.6207, | |
| "step": 114688 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8113784204674774e-05, | |
| "loss": 4.5981, | |
| "step": 115200 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8105398257164254e-05, | |
| "loss": 4.6107, | |
| "step": 115712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8097012309653734e-05, | |
| "loss": 4.6092, | |
| "step": 116224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8088626362143214e-05, | |
| "loss": 4.5906, | |
| "step": 116736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8080240414632694e-05, | |
| "loss": 4.6198, | |
| "step": 117248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.80718708459259e-05, | |
| "loss": 4.6045, | |
| "step": 117760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.806348489841538e-05, | |
| "loss": 4.6099, | |
| "step": 118272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.805509895090486e-05, | |
| "loss": 4.6065, | |
| "step": 118784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.804671300339434e-05, | |
| "loss": 4.5812, | |
| "step": 119296 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.803832705588382e-05, | |
| "loss": 4.5973, | |
| "step": 119808 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.80299411083733e-05, | |
| "loss": 4.5946, | |
| "step": 120320 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.802155516086278e-05, | |
| "loss": 4.5978, | |
| "step": 120832 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8013185592156e-05, | |
| "loss": 4.5957, | |
| "step": 121344 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.800481602344921e-05, | |
| "loss": 4.5913, | |
| "step": 121856 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.799643007593869e-05, | |
| "loss": 4.5919, | |
| "step": 122368 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.798804412842817e-05, | |
| "loss": 4.5878, | |
| "step": 122880 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.797967455972138e-05, | |
| "loss": 4.573, | |
| "step": 123392 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.797128861221086e-05, | |
| "loss": 4.58, | |
| "step": 123904 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.796290266470034e-05, | |
| "loss": 4.5838, | |
| "step": 124416 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.795451671718982e-05, | |
| "loss": 4.5871, | |
| "step": 124928 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7946147148483026e-05, | |
| "loss": 4.5745, | |
| "step": 125440 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7937761200972506e-05, | |
| "loss": 4.5683, | |
| "step": 125952 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7929375253461986e-05, | |
| "loss": 4.5696, | |
| "step": 126464 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.792098930595147e-05, | |
| "loss": 4.5779, | |
| "step": 126976 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.791260335844095e-05, | |
| "loss": 4.5759, | |
| "step": 127488 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.790421741093043e-05, | |
| "loss": 4.5692, | |
| "step": 128000 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.789583146341991e-05, | |
| "loss": 4.561, | |
| "step": 128512 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.788744551590939e-05, | |
| "loss": 4.555, | |
| "step": 129024 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.78790759472026e-05, | |
| "loss": 4.5643, | |
| "step": 129536 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.787068999969208e-05, | |
| "loss": 4.5512, | |
| "step": 130048 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.786230405218156e-05, | |
| "loss": 4.5558, | |
| "step": 130560 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.785391810467104e-05, | |
| "loss": 4.5576, | |
| "step": 131072 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.784554853596425e-05, | |
| "loss": 4.557, | |
| "step": 131584 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.783716258845373e-05, | |
| "loss": 4.5544, | |
| "step": 132096 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.782877664094321e-05, | |
| "loss": 4.559, | |
| "step": 132608 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.782040707223642e-05, | |
| "loss": 4.5464, | |
| "step": 133120 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7812021124725906e-05, | |
| "loss": 4.549, | |
| "step": 133632 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7803635177215386e-05, | |
| "loss": 4.5459, | |
| "step": 134144 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7795249229704866e-05, | |
| "loss": 4.5408, | |
| "step": 134656 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7786863282194346e-05, | |
| "loss": 4.556, | |
| "step": 135168 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7778477334683826e-05, | |
| "loss": 4.5489, | |
| "step": 135680 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7770091387173306e-05, | |
| "loss": 4.5484, | |
| "step": 136192 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7761705439662786e-05, | |
| "loss": 4.5413, | |
| "step": 136704 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7753335870955995e-05, | |
| "loss": 4.5491, | |
| "step": 137216 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7744949923445475e-05, | |
| "loss": 4.5463, | |
| "step": 137728 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7736563975934955e-05, | |
| "loss": 4.5331, | |
| "step": 138240 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7728178028424435e-05, | |
| "loss": 4.5536, | |
| "step": 138752 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7719808459717644e-05, | |
| "loss": 4.5384, | |
| "step": 139264 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7711422512207124e-05, | |
| "loss": 4.5417, | |
| "step": 139776 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7703036564696604e-05, | |
| "loss": 4.5251, | |
| "step": 140288 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7694650617186084e-05, | |
| "loss": 4.5262, | |
| "step": 140800 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7686264669675564e-05, | |
| "loss": 4.5203, | |
| "step": 141312 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.767789510096878e-05, | |
| "loss": 4.5235, | |
| "step": 141824 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.766950915345826e-05, | |
| "loss": 4.5192, | |
| "step": 142336 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.766112320594773e-05, | |
| "loss": 4.5362, | |
| "step": 142848 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.765273725843721e-05, | |
| "loss": 4.5377, | |
| "step": 143360 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.764435131092669e-05, | |
| "loss": 4.5251, | |
| "step": 143872 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.763596536341617e-05, | |
| "loss": 4.5193, | |
| "step": 144384 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.762757941590565e-05, | |
| "loss": 4.5234, | |
| "step": 144896 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.761919346839513e-05, | |
| "loss": 4.5262, | |
| "step": 145408 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.761080752088461e-05, | |
| "loss": 4.532, | |
| "step": 145920 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.760243795217783e-05, | |
| "loss": 4.5272, | |
| "step": 146432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.759405200466731e-05, | |
| "loss": 4.5198, | |
| "step": 146944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.758566605715679e-05, | |
| "loss": 4.5121, | |
| "step": 147456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.757728010964627e-05, | |
| "loss": 4.5202, | |
| "step": 147968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.756889416213575e-05, | |
| "loss": 4.5129, | |
| "step": 148480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.756052459342896e-05, | |
| "loss": 4.5191, | |
| "step": 148992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.755213864591844e-05, | |
| "loss": 4.5135, | |
| "step": 149504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7543769077211646e-05, | |
| "loss": 4.5083, | |
| "step": 150016 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7535383129701126e-05, | |
| "loss": 4.5212, | |
| "step": 150528 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7526997182190606e-05, | |
| "loss": 4.5073, | |
| "step": 151040 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7518611234680086e-05, | |
| "loss": 4.502, | |
| "step": 151552 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7510225287169566e-05, | |
| "loss": 4.5065, | |
| "step": 152064 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7501839339659046e-05, | |
| "loss": 4.5023, | |
| "step": 152576 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 4.472645282745361, | |
| "eval_runtime": 305.2704, | |
| "eval_samples_per_second": 1250.01, | |
| "eval_steps_per_second": 39.064, | |
| "step": 152640 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7493453392148526e-05, | |
| "loss": 4.4888, | |
| "step": 153088 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.748506744463801e-05, | |
| "loss": 4.5014, | |
| "step": 153600 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.747668149712749e-05, | |
| "loss": 4.5164, | |
| "step": 154112 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.746829554961697e-05, | |
| "loss": 4.496, | |
| "step": 154624 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7459909602106446e-05, | |
| "loss": 4.5078, | |
| "step": 155136 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7451523654595926e-05, | |
| "loss": 4.4881, | |
| "step": 155648 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7443137707085406e-05, | |
| "loss": 4.5028, | |
| "step": 156160 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7434751759574886e-05, | |
| "loss": 4.4786, | |
| "step": 156672 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7426365812064366e-05, | |
| "loss": 4.4957, | |
| "step": 157184 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7417979864553846e-05, | |
| "loss": 4.4958, | |
| "step": 157696 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7409593917043326e-05, | |
| "loss": 4.4962, | |
| "step": 158208 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7401207969532806e-05, | |
| "loss": 4.5029, | |
| "step": 158720 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7392838400826015e-05, | |
| "loss": 4.4784, | |
| "step": 159232 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7384452453315495e-05, | |
| "loss": 4.4802, | |
| "step": 159744 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7376066505804975e-05, | |
| "loss": 4.4765, | |
| "step": 160256 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.736768055829446e-05, | |
| "loss": 4.4858, | |
| "step": 160768 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.735929461078394e-05, | |
| "loss": 4.4903, | |
| "step": 161280 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.735090866327342e-05, | |
| "loss": 4.4831, | |
| "step": 161792 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.73425227157629e-05, | |
| "loss": 4.4865, | |
| "step": 162304 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.733415314705611e-05, | |
| "loss": 4.5014, | |
| "step": 162816 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.732576719954559e-05, | |
| "loss": 4.4771, | |
| "step": 163328 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.731738125203507e-05, | |
| "loss": 4.4741, | |
| "step": 163840 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.730899530452455e-05, | |
| "loss": 4.4784, | |
| "step": 164352 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.730060935701403e-05, | |
| "loss": 4.4904, | |
| "step": 164864 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.729222340950351e-05, | |
| "loss": 4.4725, | |
| "step": 165376 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.728383746199298e-05, | |
| "loss": 4.4717, | |
| "step": 165888 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.727545151448246e-05, | |
| "loss": 4.4726, | |
| "step": 166400 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.726706556697194e-05, | |
| "loss": 4.4727, | |
| "step": 166912 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.725867961946143e-05, | |
| "loss": 4.4563, | |
| "step": 167424 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.725029367195091e-05, | |
| "loss": 4.4648, | |
| "step": 167936 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.724192410324412e-05, | |
| "loss": 4.4642, | |
| "step": 168448 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7233554534537335e-05, | |
| "loss": 4.4673, | |
| "step": 168960 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.722516858702681e-05, | |
| "loss": 4.4796, | |
| "step": 169472 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.721678263951629e-05, | |
| "loss": 4.4591, | |
| "step": 169984 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.720839669200577e-05, | |
| "loss": 4.4628, | |
| "step": 170496 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.720001074449525e-05, | |
| "loss": 4.471, | |
| "step": 171008 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.719162479698473e-05, | |
| "loss": 4.4538, | |
| "step": 171520 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.718323884947421e-05, | |
| "loss": 4.4527, | |
| "step": 172032 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.717485290196369e-05, | |
| "loss": 4.4473, | |
| "step": 172544 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.71664833332569e-05, | |
| "loss": 4.456, | |
| "step": 173056 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7158097385746384e-05, | |
| "loss": 4.4456, | |
| "step": 173568 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7149711438235864e-05, | |
| "loss": 4.4649, | |
| "step": 174080 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7141325490725343e-05, | |
| "loss": 4.4425, | |
| "step": 174592 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.713295592201855e-05, | |
| "loss": 4.4596, | |
| "step": 175104 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.712456997450803e-05, | |
| "loss": 4.4509, | |
| "step": 175616 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.711618402699751e-05, | |
| "loss": 4.4499, | |
| "step": 176128 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.710779807948699e-05, | |
| "loss": 4.4553, | |
| "step": 176640 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.709941213197647e-05, | |
| "loss": 4.4391, | |
| "step": 177152 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.709104256326968e-05, | |
| "loss": 4.4324, | |
| "step": 177664 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.708265661575916e-05, | |
| "loss": 4.4544, | |
| "step": 178176 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.707427066824864e-05, | |
| "loss": 4.4456, | |
| "step": 178688 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.706588472073812e-05, | |
| "loss": 4.436, | |
| "step": 179200 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.705751515203134e-05, | |
| "loss": 4.4384, | |
| "step": 179712 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.704912920452082e-05, | |
| "loss": 4.4441, | |
| "step": 180224 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.70407432570103e-05, | |
| "loss": 4.4264, | |
| "step": 180736 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7032373688303506e-05, | |
| "loss": 4.4492, | |
| "step": 181248 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7024004119596715e-05, | |
| "loss": 4.434, | |
| "step": 181760 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7015618172086195e-05, | |
| "loss": 4.4322, | |
| "step": 182272 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7007232224575675e-05, | |
| "loss": 4.4442, | |
| "step": 182784 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6998846277065155e-05, | |
| "loss": 4.4233, | |
| "step": 183296 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6990460329554635e-05, | |
| "loss": 4.4198, | |
| "step": 183808 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6982074382044115e-05, | |
| "loss": 4.4338, | |
| "step": 184320 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6973688434533595e-05, | |
| "loss": 4.4162, | |
| "step": 184832 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6965318865826804e-05, | |
| "loss": 4.4193, | |
| "step": 185344 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.695693291831629e-05, | |
| "loss": 4.4328, | |
| "step": 185856 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.694854697080577e-05, | |
| "loss": 4.437, | |
| "step": 186368 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.694016102329525e-05, | |
| "loss": 4.4213, | |
| "step": 186880 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.693177507578473e-05, | |
| "loss": 4.4199, | |
| "step": 187392 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.692338912827421e-05, | |
| "loss": 4.4094, | |
| "step": 187904 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.691500318076369e-05, | |
| "loss": 4.4329, | |
| "step": 188416 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.690661723325317e-05, | |
| "loss": 4.4229, | |
| "step": 188928 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6898231285742644e-05, | |
| "loss": 4.4303, | |
| "step": 189440 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6889845338232124e-05, | |
| "loss": 4.4243, | |
| "step": 189952 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6881459390721604e-05, | |
| "loss": 4.4337, | |
| "step": 190464 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6873073443211084e-05, | |
| "loss": 4.4358, | |
| "step": 190976 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.686470387450429e-05, | |
| "loss": 4.4102, | |
| "step": 191488 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.685631792699377e-05, | |
| "loss": 4.4197, | |
| "step": 192000 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.684793197948325e-05, | |
| "loss": 4.4245, | |
| "step": 192512 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.683956241077647e-05, | |
| "loss": 4.4052, | |
| "step": 193024 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.683117646326595e-05, | |
| "loss": 4.4317, | |
| "step": 193536 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6822806894559164e-05, | |
| "loss": 4.421, | |
| "step": 194048 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6814420947048644e-05, | |
| "loss": 4.4261, | |
| "step": 194560 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.680603499953812e-05, | |
| "loss": 4.426, | |
| "step": 195072 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.67976490520276e-05, | |
| "loss": 4.4003, | |
| "step": 195584 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.678926310451708e-05, | |
| "loss": 4.4164, | |
| "step": 196096 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.678087715700656e-05, | |
| "loss": 4.4129, | |
| "step": 196608 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.677249120949604e-05, | |
| "loss": 4.4196, | |
| "step": 197120 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.676410526198552e-05, | |
| "loss": 4.4126, | |
| "step": 197632 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.675575207208246e-05, | |
| "loss": 4.4154, | |
| "step": 198144 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.674736612457194e-05, | |
| "loss": 4.4127, | |
| "step": 198656 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.673898017706142e-05, | |
| "loss": 4.4098, | |
| "step": 199168 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.67305942295509e-05, | |
| "loss": 4.3974, | |
| "step": 199680 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.672220828204038e-05, | |
| "loss": 4.4032, | |
| "step": 200192 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.671382233452986e-05, | |
| "loss": 4.4094, | |
| "step": 200704 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.670543638701934e-05, | |
| "loss": 4.4107, | |
| "step": 201216 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.669705043950882e-05, | |
| "loss": 4.4053, | |
| "step": 201728 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.66886644919983e-05, | |
| "loss": 4.3927, | |
| "step": 202240 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.668027854448778e-05, | |
| "loss": 4.3971, | |
| "step": 202752 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.667189259697726e-05, | |
| "loss": 4.4085, | |
| "step": 203264 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.666350664946674e-05, | |
| "loss": 4.4038, | |
| "step": 203776 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.665512070195622e-05, | |
| "loss": 4.3996, | |
| "step": 204288 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.664675113324943e-05, | |
| "loss": 4.393, | |
| "step": 204800 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.663836518573891e-05, | |
| "loss": 4.3843, | |
| "step": 205312 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.662997923822839e-05, | |
| "loss": 4.4016, | |
| "step": 205824 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.662159329071788e-05, | |
| "loss": 4.3779, | |
| "step": 206336 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.661320734320736e-05, | |
| "loss": 4.3935, | |
| "step": 206848 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6604837774500567e-05, | |
| "loss": 4.386, | |
| "step": 207360 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6596451826990047e-05, | |
| "loss": 4.3995, | |
| "step": 207872 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6588082258283256e-05, | |
| "loss": 4.3871, | |
| "step": 208384 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6579696310772736e-05, | |
| "loss": 4.3977, | |
| "step": 208896 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6571310363262216e-05, | |
| "loss": 4.3839, | |
| "step": 209408 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6562924415751695e-05, | |
| "loss": 4.3826, | |
| "step": 209920 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6554538468241175e-05, | |
| "loss": 4.3893, | |
| "step": 210432 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.654615252073065e-05, | |
| "loss": 4.3769, | |
| "step": 210944 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.653776657322013e-05, | |
| "loss": 4.3971, | |
| "step": 211456 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6529380625709615e-05, | |
| "loss": 4.392, | |
| "step": 211968 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6520994678199095e-05, | |
| "loss": 4.3912, | |
| "step": 212480 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6512608730688575e-05, | |
| "loss": 4.3814, | |
| "step": 212992 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6504222783178055e-05, | |
| "loss": 4.3879, | |
| "step": 213504 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6495836835667535e-05, | |
| "loss": 4.3919, | |
| "step": 214016 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6487467266960744e-05, | |
| "loss": 4.3723, | |
| "step": 214528 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6479081319450224e-05, | |
| "loss": 4.3961, | |
| "step": 215040 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6470695371939704e-05, | |
| "loss": 4.3849, | |
| "step": 215552 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.646232580323291e-05, | |
| "loss": 4.3839, | |
| "step": 216064 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.645393985572239e-05, | |
| "loss": 4.3747, | |
| "step": 216576 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.64455702870156e-05, | |
| "loss": 4.3766, | |
| "step": 217088 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.643718433950508e-05, | |
| "loss": 4.3608, | |
| "step": 217600 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.642879839199457e-05, | |
| "loss": 4.3726, | |
| "step": 218112 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.642041244448405e-05, | |
| "loss": 4.367, | |
| "step": 218624 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.641202649697353e-05, | |
| "loss": 4.3829, | |
| "step": 219136 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.640364054946301e-05, | |
| "loss": 4.3872, | |
| "step": 219648 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.639525460195249e-05, | |
| "loss": 4.3748, | |
| "step": 220160 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.638686865444197e-05, | |
| "loss": 4.3702, | |
| "step": 220672 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.637848270693145e-05, | |
| "loss": 4.3727, | |
| "step": 221184 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.637009675942093e-05, | |
| "loss": 4.3785, | |
| "step": 221696 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.636172719071414e-05, | |
| "loss": 4.3823, | |
| "step": 222208 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.635334124320362e-05, | |
| "loss": 4.3796, | |
| "step": 222720 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.63449552956931e-05, | |
| "loss": 4.3683, | |
| "step": 223232 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.633656934818258e-05, | |
| "loss": 4.3729, | |
| "step": 223744 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.632818340067206e-05, | |
| "loss": 4.3772, | |
| "step": 224256 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.631979745316154e-05, | |
| "loss": 4.3633, | |
| "step": 224768 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.631141150565102e-05, | |
| "loss": 4.3731, | |
| "step": 225280 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.63030255581405e-05, | |
| "loss": 4.3663, | |
| "step": 225792 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.629465598943371e-05, | |
| "loss": 4.3635, | |
| "step": 226304 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.628627004192319e-05, | |
| "loss": 4.3768, | |
| "step": 226816 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.62779004732164e-05, | |
| "loss": 4.3648, | |
| "step": 227328 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.626951452570588e-05, | |
| "loss": 4.358, | |
| "step": 227840 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.626112857819536e-05, | |
| "loss": 4.3626, | |
| "step": 228352 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6252742630684835e-05, | |
| "loss": 4.3574, | |
| "step": 228864 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 4.3384690284729, | |
| "eval_runtime": 294.8635, | |
| "eval_samples_per_second": 1294.128, | |
| "eval_steps_per_second": 40.442, | |
| "step": 228960 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6244356683174315e-05, | |
| "loss": 4.3548, | |
| "step": 229376 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6235970735663795e-05, | |
| "loss": 4.3566, | |
| "step": 229888 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6227584788153275e-05, | |
| "loss": 4.377, | |
| "step": 230400 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6219215219446484e-05, | |
| "loss": 4.3561, | |
| "step": 230912 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.621082927193597e-05, | |
| "loss": 4.373, | |
| "step": 231424 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.620244332442545e-05, | |
| "loss": 4.3465, | |
| "step": 231936 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.619405737691493e-05, | |
| "loss": 4.365, | |
| "step": 232448 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.618567142940441e-05, | |
| "loss": 4.3426, | |
| "step": 232960 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.617730186069762e-05, | |
| "loss": 4.3597, | |
| "step": 233472 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.61689159131871e-05, | |
| "loss": 4.3622, | |
| "step": 233984 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.616052996567658e-05, | |
| "loss": 4.359, | |
| "step": 234496 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.615214401816606e-05, | |
| "loss": 4.3641, | |
| "step": 235008 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.614377444945927e-05, | |
| "loss": 4.3448, | |
| "step": 235520 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.613538850194875e-05, | |
| "loss": 4.3453, | |
| "step": 236032 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.612700255443823e-05, | |
| "loss": 4.3442, | |
| "step": 236544 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.611861660692771e-05, | |
| "loss": 4.3475, | |
| "step": 237056 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.611023065941719e-05, | |
| "loss": 4.3572, | |
| "step": 237568 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.610184471190667e-05, | |
| "loss": 4.3523, | |
| "step": 238080 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6093458764396156e-05, | |
| "loss": 4.3519, | |
| "step": 238592 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6085072816885636e-05, | |
| "loss": 4.3677, | |
| "step": 239104 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6076686869375116e-05, | |
| "loss": 4.3497, | |
| "step": 239616 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6068300921864595e-05, | |
| "loss": 4.3476, | |
| "step": 240128 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6059914974354075e-05, | |
| "loss": 4.3504, | |
| "step": 240640 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6051529026843555e-05, | |
| "loss": 4.3553, | |
| "step": 241152 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6043159458136764e-05, | |
| "loss": 4.3404, | |
| "step": 241664 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6034789889429974e-05, | |
| "loss": 4.3454, | |
| "step": 242176 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6026403941919454e-05, | |
| "loss": 4.3447, | |
| "step": 242688 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6018017994408933e-05, | |
| "loss": 4.338, | |
| "step": 243200 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6009632046898413e-05, | |
| "loss": 4.332, | |
| "step": 243712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.6001246099387893e-05, | |
| "loss": 4.3346, | |
| "step": 244224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.599286015187737e-05, | |
| "loss": 4.3422, | |
| "step": 244736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.598449058317059e-05, | |
| "loss": 4.3409, | |
| "step": 245248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.597610463566007e-05, | |
| "loss": 4.3513, | |
| "step": 245760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.596771868814955e-05, | |
| "loss": 4.3421, | |
| "step": 246272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.595934911944276e-05, | |
| "loss": 4.3298, | |
| "step": 246784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.595096317193224e-05, | |
| "loss": 4.3424, | |
| "step": 247296 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.594257722442172e-05, | |
| "loss": 4.3354, | |
| "step": 247808 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.59341912769112e-05, | |
| "loss": 4.3259, | |
| "step": 248320 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.592580532940067e-05, | |
| "loss": 4.3285, | |
| "step": 248832 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.591741938189015e-05, | |
| "loss": 4.3286, | |
| "step": 249344 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.590903343437963e-05, | |
| "loss": 4.3262, | |
| "step": 249856 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.590064748686911e-05, | |
| "loss": 4.3421, | |
| "step": 250368 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.589226153935859e-05, | |
| "loss": 4.3234, | |
| "step": 250880 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.588387559184808e-05, | |
| "loss": 4.3368, | |
| "step": 251392 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.587548964433756e-05, | |
| "loss": 4.3336, | |
| "step": 251904 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.586710369682704e-05, | |
| "loss": 4.3261, | |
| "step": 252416 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.585873412812025e-05, | |
| "loss": 4.3361, | |
| "step": 252928 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.585034818060973e-05, | |
| "loss": 4.3186, | |
| "step": 253440 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.584196223309921e-05, | |
| "loss": 4.3073, | |
| "step": 253952 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5833592664392416e-05, | |
| "loss": 4.3397, | |
| "step": 254464 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5825206716881896e-05, | |
| "loss": 4.3269, | |
| "step": 254976 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5816820769371376e-05, | |
| "loss": 4.3223, | |
| "step": 255488 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5808434821860856e-05, | |
| "loss": 4.3179, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5800048874350336e-05, | |
| "loss": 4.3233, | |
| "step": 256512 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5791662926839816e-05, | |
| "loss": 4.3123, | |
| "step": 257024 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5783293358133025e-05, | |
| "loss": 4.3285, | |
| "step": 257536 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.577490741062251e-05, | |
| "loss": 4.3223, | |
| "step": 258048 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.576653784191572e-05, | |
| "loss": 4.3164, | |
| "step": 258560 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.57581518944052e-05, | |
| "loss": 4.3273, | |
| "step": 259072 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.574976594689468e-05, | |
| "loss": 4.3074, | |
| "step": 259584 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.574137999938416e-05, | |
| "loss": 4.306, | |
| "step": 260096 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.573299405187364e-05, | |
| "loss": 4.3237, | |
| "step": 260608 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.572460810436312e-05, | |
| "loss": 4.2981, | |
| "step": 261120 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.57162221568526e-05, | |
| "loss": 4.3026, | |
| "step": 261632 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.570783620934208e-05, | |
| "loss": 4.3214, | |
| "step": 262144 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.569946664063529e-05, | |
| "loss": 4.3242, | |
| "step": 262656 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.569108069312477e-05, | |
| "loss": 4.3048, | |
| "step": 263168 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.568269474561425e-05, | |
| "loss": 4.3148, | |
| "step": 263680 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.567430879810373e-05, | |
| "loss": 4.2965, | |
| "step": 264192 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.566592285059321e-05, | |
| "loss": 4.3194, | |
| "step": 264704 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5657553281886425e-05, | |
| "loss": 4.3116, | |
| "step": 265216 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5649167334375905e-05, | |
| "loss": 4.3209, | |
| "step": 265728 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5640781386865385e-05, | |
| "loss": 4.3116, | |
| "step": 266240 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.563239543935486e-05, | |
| "loss": 4.3227, | |
| "step": 266752 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5624025870648074e-05, | |
| "loss": 4.3267, | |
| "step": 267264 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5615639923137554e-05, | |
| "loss": 4.2964, | |
| "step": 267776 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5607253975627034e-05, | |
| "loss": 4.3151, | |
| "step": 268288 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.559886802811651e-05, | |
| "loss": 4.3173, | |
| "step": 268800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.559048208060599e-05, | |
| "loss": 4.2891, | |
| "step": 269312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.55821125118992e-05, | |
| "loss": 4.3259, | |
| "step": 269824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.557372656438868e-05, | |
| "loss": 4.313, | |
| "step": 270336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.556534061687816e-05, | |
| "loss": 4.3144, | |
| "step": 270848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.555695466936764e-05, | |
| "loss": 4.3197, | |
| "step": 271360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.554858510066086e-05, | |
| "loss": 4.296, | |
| "step": 271872 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.554019915315033e-05, | |
| "loss": 4.3025, | |
| "step": 272384 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.553181320563981e-05, | |
| "loss": 4.3081, | |
| "step": 272896 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.552342725812929e-05, | |
| "loss": 4.3118, | |
| "step": 273408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.551504131061877e-05, | |
| "loss": 4.3056, | |
| "step": 273920 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.550667174191198e-05, | |
| "loss": 4.3105, | |
| "step": 274432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.549828579440146e-05, | |
| "loss": 4.3004, | |
| "step": 274944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.548989984689094e-05, | |
| "loss": 4.3101, | |
| "step": 275456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.548151389938042e-05, | |
| "loss": 4.291, | |
| "step": 275968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.54731279518699e-05, | |
| "loss": 4.2996, | |
| "step": 276480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.546474200435939e-05, | |
| "loss": 4.3067, | |
| "step": 276992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.545635605684887e-05, | |
| "loss": 4.308, | |
| "step": 277504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.544797010933835e-05, | |
| "loss": 4.3018, | |
| "step": 278016 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.543958416182783e-05, | |
| "loss": 4.2878, | |
| "step": 278528 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5431214593121036e-05, | |
| "loss": 4.2928, | |
| "step": 279040 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5422828645610516e-05, | |
| "loss": 4.3021, | |
| "step": 279552 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5414442698099996e-05, | |
| "loss": 4.3034, | |
| "step": 280064 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5406056750589476e-05, | |
| "loss": 4.2988, | |
| "step": 280576 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5397687181882685e-05, | |
| "loss": 4.2899, | |
| "step": 281088 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5389301234372165e-05, | |
| "loss": 4.2845, | |
| "step": 281600 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5380915286861645e-05, | |
| "loss": 4.2984, | |
| "step": 282112 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5372529339351125e-05, | |
| "loss": 4.2797, | |
| "step": 282624 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.536415977064434e-05, | |
| "loss": 4.2903, | |
| "step": 283136 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.535579020193755e-05, | |
| "loss": 4.2824, | |
| "step": 283648 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.534742063323076e-05, | |
| "loss": 4.2965, | |
| "step": 284160 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.533903468572024e-05, | |
| "loss": 4.2876, | |
| "step": 284672 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.533064873820972e-05, | |
| "loss": 4.3011, | |
| "step": 285184 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.53222627906992e-05, | |
| "loss": 4.2802, | |
| "step": 285696 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.531387684318868e-05, | |
| "loss": 4.2854, | |
| "step": 286208 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.530549089567816e-05, | |
| "loss": 4.2881, | |
| "step": 286720 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.529710494816764e-05, | |
| "loss": 4.2833, | |
| "step": 287232 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.528871900065712e-05, | |
| "loss": 4.293, | |
| "step": 287744 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.52803330531466e-05, | |
| "loss": 4.2978, | |
| "step": 288256 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.527196348443981e-05, | |
| "loss": 4.2911, | |
| "step": 288768 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5263577536929295e-05, | |
| "loss": 4.2828, | |
| "step": 289280 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5255191589418775e-05, | |
| "loss": 4.2891, | |
| "step": 289792 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5246805641908255e-05, | |
| "loss": 4.2932, | |
| "step": 290304 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5238419694397735e-05, | |
| "loss": 4.277, | |
| "step": 290816 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5230033746887215e-05, | |
| "loss": 4.2983, | |
| "step": 291328 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5221647799376694e-05, | |
| "loss": 4.2838, | |
| "step": 291840 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5213278230669904e-05, | |
| "loss": 4.2902, | |
| "step": 292352 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5204892283159384e-05, | |
| "loss": 4.2808, | |
| "step": 292864 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5196506335648863e-05, | |
| "loss": 4.2805, | |
| "step": 293376 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5188120388138343e-05, | |
| "loss": 4.2648, | |
| "step": 293888 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.517976719823528e-05, | |
| "loss": 4.2822, | |
| "step": 294400 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.517138125072476e-05, | |
| "loss": 4.271, | |
| "step": 294912 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.516299530321425e-05, | |
| "loss": 4.2839, | |
| "step": 295424 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.515460935570373e-05, | |
| "loss": 4.2885, | |
| "step": 295936 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.514622340819321e-05, | |
| "loss": 4.2865, | |
| "step": 296448 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.513783746068269e-05, | |
| "loss": 4.2719, | |
| "step": 296960 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512945151317217e-05, | |
| "loss": 4.2827, | |
| "step": 297472 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512106556566164e-05, | |
| "loss": 4.2871, | |
| "step": 297984 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.511267961815112e-05, | |
| "loss": 4.2871, | |
| "step": 298496 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.51042936706406e-05, | |
| "loss": 4.2858, | |
| "step": 299008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.509590772313008e-05, | |
| "loss": 4.2776, | |
| "step": 299520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.508752177561956e-05, | |
| "loss": 4.2765, | |
| "step": 300032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.507915220691277e-05, | |
| "loss": 4.286, | |
| "step": 300544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5070782638205986e-05, | |
| "loss": 4.2706, | |
| "step": 301056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5062396690695466e-05, | |
| "loss": 4.2813, | |
| "step": 301568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5054010743184946e-05, | |
| "loss": 4.2718, | |
| "step": 302080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5045624795674426e-05, | |
| "loss": 4.2755, | |
| "step": 302592 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5037238848163906e-05, | |
| "loss": 4.2866, | |
| "step": 303104 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5028852900653386e-05, | |
| "loss": 4.2734, | |
| "step": 303616 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5020466953142866e-05, | |
| "loss": 4.2636, | |
| "step": 304128 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5012081005632346e-05, | |
| "loss": 4.2698, | |
| "step": 304640 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5003711436925555e-05, | |
| "loss": 4.2706, | |
| "step": 305152 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 4.256267547607422, | |
| "eval_runtime": 295.3709, | |
| "eval_samples_per_second": 1291.905, | |
| "eval_steps_per_second": 40.373, | |
| "step": 305280 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4995325489415035e-05, | |
| "loss": 4.2581, | |
| "step": 305664 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4986939541904515e-05, | |
| "loss": 4.2676, | |
| "step": 306176 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4978553594393995e-05, | |
| "loss": 4.2837, | |
| "step": 306688 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4970167646883475e-05, | |
| "loss": 4.2673, | |
| "step": 307200 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4961781699372955e-05, | |
| "loss": 4.2813, | |
| "step": 307712 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4953395751862435e-05, | |
| "loss": 4.2644, | |
| "step": 308224 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4945009804351915e-05, | |
| "loss": 4.2707, | |
| "step": 308736 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4936623856841395e-05, | |
| "loss": 4.2596, | |
| "step": 309248 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.492823790933088e-05, | |
| "loss": 4.2662, | |
| "step": 309760 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4919851961820354e-05, | |
| "loss": 4.2726, | |
| "step": 310272 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4911466014309834e-05, | |
| "loss": 4.2713, | |
| "step": 310784 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4903080066799314e-05, | |
| "loss": 4.2766, | |
| "step": 311296 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.489471049809253e-05, | |
| "loss": 4.256, | |
| "step": 311808 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4886324550582003e-05, | |
| "loss": 4.2596, | |
| "step": 312320 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4877938603071483e-05, | |
| "loss": 4.2573, | |
| "step": 312832 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.486955265556096e-05, | |
| "loss": 4.2586, | |
| "step": 313344 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.486116670805044e-05, | |
| "loss": 4.2709, | |
| "step": 313856 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.485278076053992e-05, | |
| "loss": 4.2635, | |
| "step": 314368 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.48443948130294e-05, | |
| "loss": 4.2673, | |
| "step": 314880 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.483600886551888e-05, | |
| "loss": 4.2822, | |
| "step": 315392 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.482762291800836e-05, | |
| "loss": 4.2681, | |
| "step": 315904 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.481923697049785e-05, | |
| "loss": 4.263, | |
| "step": 316416 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.481085102298733e-05, | |
| "loss": 4.2623, | |
| "step": 316928 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.480248145428054e-05, | |
| "loss": 4.2674, | |
| "step": 317440 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.479409550677002e-05, | |
| "loss": 4.2559, | |
| "step": 317952 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.47857095592595e-05, | |
| "loss": 4.2618, | |
| "step": 318464 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.477732361174898e-05, | |
| "loss": 4.2574, | |
| "step": 318976 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.476893766423846e-05, | |
| "loss": 4.2559, | |
| "step": 319488 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.476055171672794e-05, | |
| "loss": 4.2499, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.475216576921742e-05, | |
| "loss": 4.2528, | |
| "step": 320512 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.474377982170689e-05, | |
| "loss": 4.2546, | |
| "step": 321024 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.473541025300011e-05, | |
| "loss": 4.2596, | |
| "step": 321536 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.472702430548959e-05, | |
| "loss": 4.2674, | |
| "step": 322048 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4718654736782804e-05, | |
| "loss": 4.259, | |
| "step": 322560 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4710268789272283e-05, | |
| "loss": 4.248, | |
| "step": 323072 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4701882841761763e-05, | |
| "loss": 4.2617, | |
| "step": 323584 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.469351327305497e-05, | |
| "loss": 4.2589, | |
| "step": 324096 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.468512732554445e-05, | |
| "loss": 4.2381, | |
| "step": 324608 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.467674137803393e-05, | |
| "loss": 4.2431, | |
| "step": 325120 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.466835543052341e-05, | |
| "loss": 4.2488, | |
| "step": 325632 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.465996948301289e-05, | |
| "loss": 4.2423, | |
| "step": 326144 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4651583535502366e-05, | |
| "loss": 4.262, | |
| "step": 326656 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4643197587991846e-05, | |
| "loss": 4.2427, | |
| "step": 327168 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4634811640481325e-05, | |
| "loss": 4.2524, | |
| "step": 327680 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4626425692970805e-05, | |
| "loss": 4.2519, | |
| "step": 328192 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.461805612426402e-05, | |
| "loss": 4.25, | |
| "step": 328704 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.46096701767535e-05, | |
| "loss": 4.2536, | |
| "step": 329216 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.460128422924298e-05, | |
| "loss": 4.2388, | |
| "step": 329728 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.459291466053619e-05, | |
| "loss": 4.2238, | |
| "step": 330240 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.458452871302567e-05, | |
| "loss": 4.263, | |
| "step": 330752 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.457614276551515e-05, | |
| "loss": 4.2439, | |
| "step": 331264 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.456775681800463e-05, | |
| "loss": 4.2447, | |
| "step": 331776 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.455937087049411e-05, | |
| "loss": 4.2415, | |
| "step": 332288 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.455098492298359e-05, | |
| "loss": 4.2385, | |
| "step": 332800 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.454259897547307e-05, | |
| "loss": 4.2356, | |
| "step": 333312 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.453421302796255e-05, | |
| "loss": 4.2451, | |
| "step": 333824 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.452582708045203e-05, | |
| "loss": 4.2429, | |
| "step": 334336 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.451745751174524e-05, | |
| "loss": 4.2377, | |
| "step": 334848 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.450907156423472e-05, | |
| "loss": 4.2539, | |
| "step": 335360 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4500685616724206e-05, | |
| "loss": 4.2268, | |
| "step": 335872 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4492299669213686e-05, | |
| "loss": 4.2249, | |
| "step": 336384 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4483913721703166e-05, | |
| "loss": 4.2511, | |
| "step": 336896 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4475527774192646e-05, | |
| "loss": 4.217, | |
| "step": 337408 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4467141826682126e-05, | |
| "loss": 4.2242, | |
| "step": 337920 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4458755879171606e-05, | |
| "loss": 4.2462, | |
| "step": 338432 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4450386310464815e-05, | |
| "loss": 4.2469, | |
| "step": 338944 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4442000362954295e-05, | |
| "loss": 4.2293, | |
| "step": 339456 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4433630794247504e-05, | |
| "loss": 4.2388, | |
| "step": 339968 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4425244846736984e-05, | |
| "loss": 4.22, | |
| "step": 340480 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4416858899226464e-05, | |
| "loss": 4.2412, | |
| "step": 340992 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4408472951715944e-05, | |
| "loss": 4.2329, | |
| "step": 341504 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4400087004205423e-05, | |
| "loss": 4.2418, | |
| "step": 342016 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4391701056694903e-05, | |
| "loss": 4.2355, | |
| "step": 342528 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.438333148798812e-05, | |
| "loss": 4.2461, | |
| "step": 343040 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.43749455404776e-05, | |
| "loss": 4.25, | |
| "step": 343552 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.436655959296708e-05, | |
| "loss": 4.2234, | |
| "step": 344064 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.435817364545655e-05, | |
| "loss": 4.2345, | |
| "step": 344576 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.434978769794603e-05, | |
| "loss": 4.2424, | |
| "step": 345088 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.434140175043551e-05, | |
| "loss": 4.2178, | |
| "step": 345600 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.433303218172873e-05, | |
| "loss": 4.2466, | |
| "step": 346112 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.43246462342182e-05, | |
| "loss": 4.243, | |
| "step": 346624 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.431626028670768e-05, | |
| "loss": 4.2419, | |
| "step": 347136 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.430787433919716e-05, | |
| "loss": 4.2455, | |
| "step": 347648 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.429948839168664e-05, | |
| "loss": 4.2227, | |
| "step": 348160 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.429111882297986e-05, | |
| "loss": 4.2271, | |
| "step": 348672 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.428273287546934e-05, | |
| "loss": 4.2341, | |
| "step": 349184 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.427434692795882e-05, | |
| "loss": 4.2411, | |
| "step": 349696 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.42659609804483e-05, | |
| "loss": 4.2344, | |
| "step": 350208 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4257591411741506e-05, | |
| "loss": 4.2351, | |
| "step": 350720 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4249205464230986e-05, | |
| "loss": 4.224, | |
| "step": 351232 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4240819516720466e-05, | |
| "loss": 4.2384, | |
| "step": 351744 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4232433569209946e-05, | |
| "loss": 4.2247, | |
| "step": 352256 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4224047621699426e-05, | |
| "loss": 4.2216, | |
| "step": 352768 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4215678052992635e-05, | |
| "loss": 4.232, | |
| "step": 353280 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4207292105482115e-05, | |
| "loss": 4.2364, | |
| "step": 353792 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4198906157971595e-05, | |
| "loss": 4.2287, | |
| "step": 354304 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.419052021046108e-05, | |
| "loss": 4.2188, | |
| "step": 354816 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.418213426295056e-05, | |
| "loss": 4.2201, | |
| "step": 355328 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.417374831544004e-05, | |
| "loss": 4.2284, | |
| "step": 355840 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.416536236792952e-05, | |
| "loss": 4.2347, | |
| "step": 356352 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4156976420419e-05, | |
| "loss": 4.2249, | |
| "step": 356864 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.414860685171221e-05, | |
| "loss": 4.2233, | |
| "step": 357376 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.414023728300542e-05, | |
| "loss": 4.2144, | |
| "step": 357888 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.41318513354949e-05, | |
| "loss": 4.2224, | |
| "step": 358400 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.412346538798438e-05, | |
| "loss": 4.2112, | |
| "step": 358912 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.411507944047386e-05, | |
| "loss": 4.2228, | |
| "step": 359424 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.410670987176707e-05, | |
| "loss": 4.205, | |
| "step": 359936 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.409832392425655e-05, | |
| "loss": 4.2343, | |
| "step": 360448 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.408993797674603e-05, | |
| "loss": 4.2191, | |
| "step": 360960 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4081552029235515e-05, | |
| "loss": 4.2247, | |
| "step": 361472 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4073166081724995e-05, | |
| "loss": 4.213, | |
| "step": 361984 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4064796513018204e-05, | |
| "loss": 4.216, | |
| "step": 362496 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4056410565507684e-05, | |
| "loss": 4.2194, | |
| "step": 363008 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4048024617997164e-05, | |
| "loss": 4.2158, | |
| "step": 363520 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4039638670486644e-05, | |
| "loss": 4.2234, | |
| "step": 364032 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4031252722976124e-05, | |
| "loss": 4.2266, | |
| "step": 364544 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4022866775465604e-05, | |
| "loss": 4.2216, | |
| "step": 365056 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4014480827955084e-05, | |
| "loss": 4.2156, | |
| "step": 365568 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.400609488044456e-05, | |
| "loss": 4.2192, | |
| "step": 366080 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.399770893293404e-05, | |
| "loss": 4.2278, | |
| "step": 366592 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.398933936422725e-05, | |
| "loss": 4.203, | |
| "step": 367104 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.398095341671673e-05, | |
| "loss": 4.2292, | |
| "step": 367616 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.397256746920621e-05, | |
| "loss": 4.2138, | |
| "step": 368128 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.396419790049943e-05, | |
| "loss": 4.2234, | |
| "step": 368640 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.395581195298891e-05, | |
| "loss": 4.2116, | |
| "step": 369152 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.394742600547839e-05, | |
| "loss": 4.215, | |
| "step": 369664 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.393904005796786e-05, | |
| "loss": 4.2007, | |
| "step": 370176 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.393067048926108e-05, | |
| "loss": 4.2112, | |
| "step": 370688 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.392228454175056e-05, | |
| "loss": 4.2075, | |
| "step": 371200 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.391389859424003e-05, | |
| "loss": 4.2145, | |
| "step": 371712 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.390551264672951e-05, | |
| "loss": 4.2175, | |
| "step": 372224 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.389712669921899e-05, | |
| "loss": 4.2218, | |
| "step": 372736 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.388875713051221e-05, | |
| "loss": 4.2039, | |
| "step": 373248 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.388037118300169e-05, | |
| "loss": 4.2151, | |
| "step": 373760 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.387198523549117e-05, | |
| "loss": 4.2217, | |
| "step": 374272 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3863599287980647e-05, | |
| "loss": 4.2194, | |
| "step": 374784 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.385522971927386e-05, | |
| "loss": 4.2157, | |
| "step": 375296 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3846843771763336e-05, | |
| "loss": 4.2186, | |
| "step": 375808 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3838457824252816e-05, | |
| "loss": 4.2075, | |
| "step": 376320 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3830071876742296e-05, | |
| "loss": 4.2204, | |
| "step": 376832 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3821685929231775e-05, | |
| "loss": 4.2028, | |
| "step": 377344 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3813299981721255e-05, | |
| "loss": 4.2169, | |
| "step": 377856 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3804914034210735e-05, | |
| "loss": 4.2069, | |
| "step": 378368 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3796528086700215e-05, | |
| "loss": 4.2088, | |
| "step": 378880 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3788158517993424e-05, | |
| "loss": 4.2204, | |
| "step": 379392 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3779772570482904e-05, | |
| "loss": 4.2112, | |
| "step": 379904 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.377138662297239e-05, | |
| "loss": 4.2026, | |
| "step": 380416 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.376300067546187e-05, | |
| "loss": 4.1998, | |
| "step": 380928 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.375463110675508e-05, | |
| "loss": 4.2067, | |
| "step": 381440 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 4.20039701461792, | |
| "eval_runtime": 287.3669, | |
| "eval_samples_per_second": 1327.888, | |
| "eval_steps_per_second": 41.497, | |
| "step": 381600 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.374624515924456e-05, | |
| "loss": 4.2019, | |
| "step": 381952 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.373785921173404e-05, | |
| "loss": 4.201, | |
| "step": 382464 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.372947326422352e-05, | |
| "loss": 4.2209, | |
| "step": 382976 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.372110369551673e-05, | |
| "loss": 4.2066, | |
| "step": 383488 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.371271774800621e-05, | |
| "loss": 4.2166, | |
| "step": 384000 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.370433180049569e-05, | |
| "loss": 4.2034, | |
| "step": 384512 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.369594585298517e-05, | |
| "loss": 4.2021, | |
| "step": 385024 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.368755990547465e-05, | |
| "loss": 4.1975, | |
| "step": 385536 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3679206715571594e-05, | |
| "loss": 4.2002, | |
| "step": 386048 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3670820768061074e-05, | |
| "loss": 4.2113, | |
| "step": 386560 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3662434820550554e-05, | |
| "loss": 4.2104, | |
| "step": 387072 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.365406525184376e-05, | |
| "loss": 4.2085, | |
| "step": 387584 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.364569568313698e-05, | |
| "loss": 4.1986, | |
| "step": 388096 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.363730973562645e-05, | |
| "loss": 4.1972, | |
| "step": 388608 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.362892378811593e-05, | |
| "loss": 4.1959, | |
| "step": 389120 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.362053784060541e-05, | |
| "loss": 4.194, | |
| "step": 389632 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.361215189309489e-05, | |
| "loss": 4.2081, | |
| "step": 390144 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.360376594558437e-05, | |
| "loss": 4.2025, | |
| "step": 390656 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.359537999807385e-05, | |
| "loss": 4.2061, | |
| "step": 391168 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.358699405056333e-05, | |
| "loss": 4.2175, | |
| "step": 391680 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.357860810305281e-05, | |
| "loss": 4.21, | |
| "step": 392192 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.35702221555423e-05, | |
| "loss": 4.1957, | |
| "step": 392704 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.356183620803178e-05, | |
| "loss": 4.2022, | |
| "step": 393216 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.355345026052126e-05, | |
| "loss": 4.2056, | |
| "step": 393728 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.354506431301074e-05, | |
| "loss": 4.1962, | |
| "step": 394240 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.353667836550022e-05, | |
| "loss": 4.2002, | |
| "step": 394752 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.352829241798969e-05, | |
| "loss": 4.1965, | |
| "step": 395264 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.351990647047917e-05, | |
| "loss": 4.1964, | |
| "step": 395776 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.351152052296865e-05, | |
| "loss": 4.193, | |
| "step": 396288 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.350313457545813e-05, | |
| "loss": 4.1885, | |
| "step": 396800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.349474862794761e-05, | |
| "loss": 4.1923, | |
| "step": 397312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3486395438044556e-05, | |
| "loss": 4.1992, | |
| "step": 397824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3478009490534036e-05, | |
| "loss": 4.2046, | |
| "step": 398336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3469623543023516e-05, | |
| "loss": 4.1992, | |
| "step": 398848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3461237595512996e-05, | |
| "loss": 4.1874, | |
| "step": 399360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3452851648002476e-05, | |
| "loss": 4.2039, | |
| "step": 399872 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3444465700491956e-05, | |
| "loss": 4.1991, | |
| "step": 400384 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3436079752981436e-05, | |
| "loss": 4.1751, | |
| "step": 400896 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3427710184274645e-05, | |
| "loss": 4.191, | |
| "step": 401408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3419324236764125e-05, | |
| "loss": 4.1857, | |
| "step": 401920 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3410938289253605e-05, | |
| "loss": 4.1852, | |
| "step": 402432 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3402552341743085e-05, | |
| "loss": 4.2019, | |
| "step": 402944 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3394166394232565e-05, | |
| "loss": 4.1846, | |
| "step": 403456 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3385780446722045e-05, | |
| "loss": 4.189, | |
| "step": 403968 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3377394499211525e-05, | |
| "loss": 4.1917, | |
| "step": 404480 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3369008551701005e-05, | |
| "loss": 4.1904, | |
| "step": 404992 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3360622604190485e-05, | |
| "loss": 4.1989, | |
| "step": 405504 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.335226941428743e-05, | |
| "loss": 4.181, | |
| "step": 406016 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.334388346677691e-05, | |
| "loss": 4.1602, | |
| "step": 406528 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.333549751926639e-05, | |
| "loss": 4.2059, | |
| "step": 407040 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.332711157175587e-05, | |
| "loss": 4.1842, | |
| "step": 407552 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.331872562424535e-05, | |
| "loss": 4.1923, | |
| "step": 408064 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.331033967673483e-05, | |
| "loss": 4.1792, | |
| "step": 408576 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.330195372922431e-05, | |
| "loss": 4.1799, | |
| "step": 409088 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.329356778171379e-05, | |
| "loss": 4.1811, | |
| "step": 409600 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3285198213007e-05, | |
| "loss": 4.1828, | |
| "step": 410112 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.327681226549648e-05, | |
| "loss": 4.1889, | |
| "step": 410624 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.326842631798596e-05, | |
| "loss": 4.178, | |
| "step": 411136 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.326004037047544e-05, | |
| "loss": 4.1968, | |
| "step": 411648 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.325165442296492e-05, | |
| "loss": 4.1745, | |
| "step": 412160 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.32432684754544e-05, | |
| "loss": 4.1648, | |
| "step": 412672 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3234882527943885e-05, | |
| "loss": 4.1926, | |
| "step": 413184 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.322649658043336e-05, | |
| "loss": 4.1646, | |
| "step": 413696 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3218127011726574e-05, | |
| "loss": 4.1663, | |
| "step": 414208 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3209741064216054e-05, | |
| "loss": 4.1875, | |
| "step": 414720 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.320135511670553e-05, | |
| "loss": 4.1878, | |
| "step": 415232 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.319298554799874e-05, | |
| "loss": 4.176, | |
| "step": 415744 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.318459960048822e-05, | |
| "loss": 4.184, | |
| "step": 416256 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.31762136529777e-05, | |
| "loss": 4.1624, | |
| "step": 416768 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3167827705467176e-05, | |
| "loss": 4.1857, | |
| "step": 417280 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3159441757956656e-05, | |
| "loss": 4.1771, | |
| "step": 417792 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3151055810446136e-05, | |
| "loss": 4.1884, | |
| "step": 418304 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.314266986293562e-05, | |
| "loss": 4.1801, | |
| "step": 418816 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.31342839154251e-05, | |
| "loss": 4.1871, | |
| "step": 419328 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.312591434671831e-05, | |
| "loss": 4.1941, | |
| "step": 419840 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.311752839920779e-05, | |
| "loss": 4.1681, | |
| "step": 420352 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.310914245169727e-05, | |
| "loss": 4.181, | |
| "step": 420864 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.310077288299048e-05, | |
| "loss": 4.1848, | |
| "step": 421376 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.309238693547996e-05, | |
| "loss": 4.1652, | |
| "step": 421888 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.308400098796944e-05, | |
| "loss": 4.1883, | |
| "step": 422400 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.307561504045892e-05, | |
| "loss": 4.1892, | |
| "step": 422912 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.30672290929484e-05, | |
| "loss": 4.1844, | |
| "step": 423424 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.305884314543788e-05, | |
| "loss": 4.1978, | |
| "step": 423936 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.305045719792736e-05, | |
| "loss": 4.1671, | |
| "step": 424448 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.304207125041684e-05, | |
| "loss": 4.1711, | |
| "step": 424960 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3033701681710057e-05, | |
| "loss": 4.1799, | |
| "step": 425472 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3025315734199536e-05, | |
| "loss": 4.1825, | |
| "step": 425984 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3016929786689016e-05, | |
| "loss": 4.184, | |
| "step": 426496 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3008543839178496e-05, | |
| "loss": 4.1787, | |
| "step": 427008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.3000157891667976e-05, | |
| "loss": 4.1673, | |
| "step": 427520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2991771944157456e-05, | |
| "loss": 4.1836, | |
| "step": 428032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2983385996646936e-05, | |
| "loss": 4.1713, | |
| "step": 428544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2975000049136416e-05, | |
| "loss": 4.1687, | |
| "step": 429056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2966630480429625e-05, | |
| "loss": 4.176, | |
| "step": 429568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2958244532919105e-05, | |
| "loss": 4.1859, | |
| "step": 430080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2949858585408585e-05, | |
| "loss": 4.1745, | |
| "step": 430592 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2941472637898065e-05, | |
| "loss": 4.1632, | |
| "step": 431104 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2933103069191274e-05, | |
| "loss": 4.1672, | |
| "step": 431616 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2924717121680754e-05, | |
| "loss": 4.176, | |
| "step": 432128 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.291633117417024e-05, | |
| "loss": 4.1785, | |
| "step": 432640 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.290796160546345e-05, | |
| "loss": 4.173, | |
| "step": 433152 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.289957565795293e-05, | |
| "loss": 4.1709, | |
| "step": 433664 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.289118971044241e-05, | |
| "loss": 4.1597, | |
| "step": 434176 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.288280376293189e-05, | |
| "loss": 4.1667, | |
| "step": 434688 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.287441781542136e-05, | |
| "loss": 4.163, | |
| "step": 435200 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.286603186791084e-05, | |
| "loss": 4.1677, | |
| "step": 435712 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.285766229920406e-05, | |
| "loss": 4.1537, | |
| "step": 436224 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.284927635169354e-05, | |
| "loss": 4.183, | |
| "step": 436736 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.284089040418301e-05, | |
| "loss": 4.1664, | |
| "step": 437248 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.283250445667249e-05, | |
| "loss": 4.1711, | |
| "step": 437760 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.282411850916198e-05, | |
| "loss": 4.1576, | |
| "step": 438272 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.281573256165146e-05, | |
| "loss": 4.1717, | |
| "step": 438784 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.280734661414094e-05, | |
| "loss": 4.1642, | |
| "step": 439296 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.279896066663042e-05, | |
| "loss": 4.1621, | |
| "step": 439808 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.279059109792363e-05, | |
| "loss": 4.1749, | |
| "step": 440320 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.278220515041311e-05, | |
| "loss": 4.1786, | |
| "step": 440832 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.277383558170632e-05, | |
| "loss": 4.1625, | |
| "step": 441344 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.27654496341958e-05, | |
| "loss": 4.1747, | |
| "step": 441856 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.275706368668528e-05, | |
| "loss": 4.161, | |
| "step": 442368 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.274867773917476e-05, | |
| "loss": 4.174, | |
| "step": 442880 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2740291791664237e-05, | |
| "loss": 4.1572, | |
| "step": 443392 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2731905844153717e-05, | |
| "loss": 4.1773, | |
| "step": 443904 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2723519896643196e-05, | |
| "loss": 4.16, | |
| "step": 444416 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2715133949132676e-05, | |
| "loss": 4.1731, | |
| "step": 444928 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.270674800162216e-05, | |
| "loss": 4.1641, | |
| "step": 445440 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.269837843291537e-05, | |
| "loss": 4.1624, | |
| "step": 445952 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.268999248540485e-05, | |
| "loss": 4.1465, | |
| "step": 446464 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.268162291669806e-05, | |
| "loss": 4.1641, | |
| "step": 446976 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.267323696918754e-05, | |
| "loss": 4.157, | |
| "step": 447488 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.266485102167702e-05, | |
| "loss": 4.1653, | |
| "step": 448000 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.26564650741665e-05, | |
| "loss": 4.1617, | |
| "step": 448512 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.264809550545971e-05, | |
| "loss": 4.1723, | |
| "step": 449024 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.263970955794919e-05, | |
| "loss": 4.1555, | |
| "step": 449536 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.263132361043867e-05, | |
| "loss": 4.1657, | |
| "step": 450048 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.262293766292815e-05, | |
| "loss": 4.1696, | |
| "step": 450560 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.261455171541763e-05, | |
| "loss": 4.1716, | |
| "step": 451072 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.260616576790712e-05, | |
| "loss": 4.1661, | |
| "step": 451584 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.25977798203966e-05, | |
| "loss": 4.1658, | |
| "step": 452096 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.258939387288608e-05, | |
| "loss": 4.1558, | |
| "step": 452608 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.258100792537555e-05, | |
| "loss": 4.1739, | |
| "step": 453120 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2572638356668766e-05, | |
| "loss": 4.1532, | |
| "step": 453632 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2564252409158246e-05, | |
| "loss": 4.1681, | |
| "step": 454144 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2555866461647726e-05, | |
| "loss": 4.1565, | |
| "step": 454656 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.25474805141372e-05, | |
| "loss": 4.1607, | |
| "step": 455168 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2539110945430415e-05, | |
| "loss": 4.1688, | |
| "step": 455680 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2530724997919895e-05, | |
| "loss": 4.1577, | |
| "step": 456192 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2522339050409375e-05, | |
| "loss": 4.16, | |
| "step": 456704 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2513969481702584e-05, | |
| "loss": 4.1485, | |
| "step": 457216 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.250558353419207e-05, | |
| "loss": 4.1598, | |
| "step": 457728 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 4.159285068511963, | |
| "eval_runtime": 289.1843, | |
| "eval_samples_per_second": 1319.543, | |
| "eval_steps_per_second": 41.237, | |
| "step": 457920 | |
| } | |
| ], | |
| "logging_steps": 512, | |
| "max_steps": 3052726, | |
| "num_train_epochs": 9223372036854775807, | |
| "save_steps": 10, | |
| "total_flos": 1.884044489295234e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
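
The table above is the complete `log_history` of a Hugging Face `trainer_state.json`: regular records every 512 steps carry `step`, `epoch`, `learning_rate`, and `loss`, while the periodic evaluation records carry `eval_loss`, `eval_runtime`, `eval_samples_per_second`, and `eval_steps_per_second`. As a minimal sketch of how this structure can be consumed, the snippet below pulls out the training- and eval-loss curves; it assumes the table has been restored to plain JSON on disk (the filename `trainer_state.json`, the use of matplotlib, and the output filename are illustrative assumptions, not part of the original log).

```python
# Minimal sketch: read a trainer_state.json like the one above and plot its
# loss curves. Assumptions: the pipe-wrapped table has been saved back as
# plain JSON at "trainer_state.json" and matplotlib is installed.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

train_steps, train_loss = [], []
eval_steps, eval_loss = [], []
for entry in state["log_history"]:
    if "loss" in entry:            # regular logging record (every 512 steps here)
        train_steps.append(entry["step"])
        train_loss.append(entry["loss"])
    elif "eval_loss" in entry:     # periodic evaluation record
        eval_steps.append(entry["step"])
        eval_loss.append(entry["eval_loss"])

# The minimum eval loss seen so far should agree with "best_metric".
print("best recorded eval loss:", min(eval_loss))

plt.plot(train_steps, train_loss, label="train loss")
plt.plot(eval_steps, eval_loss, "o-", label="eval loss")
plt.xlabel("global step")
plt.ylabel("cross-entropy loss")
plt.legend()
plt.savefig("loss_curve.png")
```

For this log, the sketch would report the final eval loss (4.159285068511963 at step 457920), which matches the `best_metric` and `best_model_checkpoint` recorded in the file header.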