|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.6007802340702211, |
|
"eval_steps": 500, |
|
"global_step": 924, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.0303955078125, |
|
"learning_rate": 4.2553191489361704e-07, |
|
"loss": 1.11, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.0294189453125, |
|
"learning_rate": 8.510638297872341e-07, |
|
"loss": 0.9825, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.028076171875, |
|
"learning_rate": 1.276595744680851e-06, |
|
"loss": 1.0375, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.036865234375, |
|
"learning_rate": 1.7021276595744682e-06, |
|
"loss": 1.042, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.0311279296875, |
|
"learning_rate": 2.1276595744680853e-06, |
|
"loss": 0.9769, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.036865234375, |
|
"learning_rate": 2.553191489361702e-06, |
|
"loss": 0.9316, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.0299072265625, |
|
"learning_rate": 2.978723404255319e-06, |
|
"loss": 1.0077, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.03369140625, |
|
"learning_rate": 3.4042553191489363e-06, |
|
"loss": 1.0346, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0301513671875, |
|
"learning_rate": 3.8297872340425535e-06, |
|
"loss": 1.0193, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0267333984375, |
|
"learning_rate": 4.255319148936171e-06, |
|
"loss": 1.0297, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0291748046875, |
|
"learning_rate": 4.680851063829788e-06, |
|
"loss": 1.0868, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0296630859375, |
|
"learning_rate": 5.106382978723404e-06, |
|
"loss": 1.0641, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.028564453125, |
|
"learning_rate": 5.531914893617022e-06, |
|
"loss": 0.9389, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.03515625, |
|
"learning_rate": 5.957446808510638e-06, |
|
"loss": 1.033, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0283203125, |
|
"learning_rate": 6.382978723404256e-06, |
|
"loss": 1.0216, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0322265625, |
|
"learning_rate": 6.808510638297873e-06, |
|
"loss": 1.1086, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0322265625, |
|
"learning_rate": 7.234042553191491e-06, |
|
"loss": 1.0577, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.030517578125, |
|
"learning_rate": 7.659574468085107e-06, |
|
"loss": 1.0733, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0303955078125, |
|
"learning_rate": 8.085106382978723e-06, |
|
"loss": 0.9865, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0291748046875, |
|
"learning_rate": 8.510638297872341e-06, |
|
"loss": 1.0125, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.034423828125, |
|
"learning_rate": 8.936170212765958e-06, |
|
"loss": 1.1245, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0260009765625, |
|
"learning_rate": 9.361702127659576e-06, |
|
"loss": 1.0024, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0306396484375, |
|
"learning_rate": 9.787234042553192e-06, |
|
"loss": 1.0131, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.033447265625, |
|
"learning_rate": 1.0212765957446808e-05, |
|
"loss": 1.0171, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.033203125, |
|
"learning_rate": 1.0638297872340426e-05, |
|
"loss": 0.9613, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.03857421875, |
|
"learning_rate": 1.1063829787234044e-05, |
|
"loss": 1.1312, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.036865234375, |
|
"learning_rate": 1.1489361702127662e-05, |
|
"loss": 1.0187, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.03515625, |
|
"learning_rate": 1.1914893617021277e-05, |
|
"loss": 0.9934, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.036376953125, |
|
"learning_rate": 1.2340425531914895e-05, |
|
"loss": 1.0872, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.035888671875, |
|
"learning_rate": 1.2765957446808513e-05, |
|
"loss": 0.9591, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.033203125, |
|
"learning_rate": 1.3191489361702127e-05, |
|
"loss": 0.9589, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.040771484375, |
|
"learning_rate": 1.3617021276595745e-05, |
|
"loss": 1.0093, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.0400390625, |
|
"learning_rate": 1.4042553191489363e-05, |
|
"loss": 1.0195, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.038330078125, |
|
"learning_rate": 1.4468085106382981e-05, |
|
"loss": 0.8936, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.04443359375, |
|
"learning_rate": 1.4893617021276596e-05, |
|
"loss": 0.9958, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.045654296875, |
|
"learning_rate": 1.5319148936170214e-05, |
|
"loss": 0.9279, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.03662109375, |
|
"learning_rate": 1.5744680851063832e-05, |
|
"loss": 1.0153, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.04638671875, |
|
"learning_rate": 1.6170212765957446e-05, |
|
"loss": 0.9862, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.042236328125, |
|
"learning_rate": 1.6595744680851064e-05, |
|
"loss": 1.0962, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.040771484375, |
|
"learning_rate": 1.7021276595744682e-05, |
|
"loss": 0.956, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.035888671875, |
|
"learning_rate": 1.74468085106383e-05, |
|
"loss": 1.0559, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.04736328125, |
|
"learning_rate": 1.7872340425531915e-05, |
|
"loss": 1.0014, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.8297872340425533e-05, |
|
"loss": 1.0252, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.04931640625, |
|
"learning_rate": 1.872340425531915e-05, |
|
"loss": 0.9541, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.0556640625, |
|
"learning_rate": 1.914893617021277e-05, |
|
"loss": 0.9603, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.0419921875, |
|
"learning_rate": 1.9574468085106384e-05, |
|
"loss": 1.0601, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.0478515625, |
|
"learning_rate": 2e-05, |
|
"loss": 0.9919, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.047119140625, |
|
"learning_rate": 1.9999977801976743e-05, |
|
"loss": 1.0247, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 1.999991120800551e-05, |
|
"loss": 0.9936, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.05419921875, |
|
"learning_rate": 1.9999800218381958e-05, |
|
"loss": 1.0315, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.0478515625, |
|
"learning_rate": 1.9999644833598836e-05, |
|
"loss": 0.9392, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.0546875, |
|
"learning_rate": 1.9999445054345993e-05, |
|
"loss": 1.0716, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.05224609375, |
|
"learning_rate": 1.9999200881510366e-05, |
|
"loss": 0.9724, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.04736328125, |
|
"learning_rate": 1.999891231617599e-05, |
|
"loss": 0.9966, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.9998579359623977e-05, |
|
"loss": 0.969, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 1.9998202013332525e-05, |
|
"loss": 0.972, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.043701171875, |
|
"learning_rate": 1.99977802789769e-05, |
|
"loss": 0.9705, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.044189453125, |
|
"learning_rate": 1.999731415842944e-05, |
|
"loss": 1.002, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.039794921875, |
|
"learning_rate": 1.9996803653759534e-05, |
|
"loss": 0.9508, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.03759765625, |
|
"learning_rate": 1.9996248767233616e-05, |
|
"loss": 0.9232, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.0390625, |
|
"learning_rate": 1.9995649501315172e-05, |
|
"loss": 1.0054, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.034423828125, |
|
"learning_rate": 1.9995005858664696e-05, |
|
"loss": 0.9685, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.03369140625, |
|
"learning_rate": 1.9994317842139715e-05, |
|
"loss": 0.9313, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.0311279296875, |
|
"learning_rate": 1.9993585454794748e-05, |
|
"loss": 0.9463, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.0311279296875, |
|
"learning_rate": 1.9992808699881303e-05, |
|
"loss": 0.9049, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.0322265625, |
|
"learning_rate": 1.999198758084787e-05, |
|
"loss": 0.9088, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.033203125, |
|
"learning_rate": 1.9991122101339885e-05, |
|
"loss": 0.9369, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.0478515625, |
|
"learning_rate": 1.9990212265199738e-05, |
|
"loss": 0.9902, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.03466796875, |
|
"learning_rate": 1.9989258076466743e-05, |
|
"loss": 0.9569, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.042724609375, |
|
"learning_rate": 1.998825953937712e-05, |
|
"loss": 0.9779, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.0380859375, |
|
"learning_rate": 1.9987216658363983e-05, |
|
"loss": 0.9505, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.036376953125, |
|
"learning_rate": 1.9986129438057306e-05, |
|
"loss": 0.9374, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.0361328125, |
|
"learning_rate": 1.998499788328392e-05, |
|
"loss": 1.0086, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.034912109375, |
|
"learning_rate": 1.9983821999067478e-05, |
|
"loss": 1.046, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.031005859375, |
|
"learning_rate": 1.998260179062844e-05, |
|
"loss": 0.9375, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.032958984375, |
|
"learning_rate": 1.9981337263384057e-05, |
|
"loss": 0.9514, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.031982421875, |
|
"learning_rate": 1.9980028422948323e-05, |
|
"loss": 0.8629, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.03125, |
|
"learning_rate": 1.9978675275131975e-05, |
|
"loss": 0.933, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.0299072265625, |
|
"learning_rate": 1.9977277825942453e-05, |
|
"loss": 0.9408, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.031494140625, |
|
"learning_rate": 1.997583608158388e-05, |
|
"loss": 1.0041, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.031982421875, |
|
"learning_rate": 1.997435004845703e-05, |
|
"loss": 0.9605, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.03271484375, |
|
"learning_rate": 1.99728197331593e-05, |
|
"loss": 0.9256, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.034912109375, |
|
"learning_rate": 1.9971245142484693e-05, |
|
"loss": 1.0026, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.031494140625, |
|
"learning_rate": 1.996962628342376e-05, |
|
"loss": 0.9789, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.0341796875, |
|
"learning_rate": 1.99679631631636e-05, |
|
"loss": 0.9437, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.030517578125, |
|
"learning_rate": 1.996625578908781e-05, |
|
"loss": 0.9487, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.033447265625, |
|
"learning_rate": 1.9964504168776454e-05, |
|
"loss": 0.9645, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.03271484375, |
|
"learning_rate": 1.9962708310006032e-05, |
|
"loss": 0.9967, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.0296630859375, |
|
"learning_rate": 1.996086822074945e-05, |
|
"loss": 1.0195, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.030517578125, |
|
"learning_rate": 1.9958983909175977e-05, |
|
"loss": 0.8769, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.031494140625, |
|
"learning_rate": 1.995705538365121e-05, |
|
"loss": 0.8407, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.033203125, |
|
"learning_rate": 1.995508265273704e-05, |
|
"loss": 0.9368, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.031982421875, |
|
"learning_rate": 1.9953065725191613e-05, |
|
"loss": 0.9308, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.03076171875, |
|
"learning_rate": 1.9951004609969286e-05, |
|
"loss": 0.9235, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.032958984375, |
|
"learning_rate": 1.9948899316220603e-05, |
|
"loss": 0.9008, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.03173828125, |
|
"learning_rate": 1.9946749853292233e-05, |
|
"loss": 0.9735, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.033447265625, |
|
"learning_rate": 1.994455623072694e-05, |
|
"loss": 0.9328, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.033203125, |
|
"learning_rate": 1.994231845826354e-05, |
|
"loss": 0.8967, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.03173828125, |
|
"learning_rate": 1.994003654583686e-05, |
|
"loss": 0.8363, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.033447265625, |
|
"learning_rate": 1.993771050357769e-05, |
|
"loss": 0.9072, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.03369140625, |
|
"learning_rate": 1.9935340341812737e-05, |
|
"loss": 0.9502, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.03271484375, |
|
"learning_rate": 1.993292607106458e-05, |
|
"loss": 0.8794, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.03466796875, |
|
"learning_rate": 1.9930467702051632e-05, |
|
"loss": 0.9601, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.0341796875, |
|
"learning_rate": 1.9927965245688073e-05, |
|
"loss": 0.9099, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.033935546875, |
|
"learning_rate": 1.9925418713083824e-05, |
|
"loss": 0.929, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.033447265625, |
|
"learning_rate": 1.992282811554448e-05, |
|
"loss": 0.9046, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.031005859375, |
|
"learning_rate": 1.9920193464571277e-05, |
|
"loss": 0.9393, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.03515625, |
|
"learning_rate": 1.9917514771861015e-05, |
|
"loss": 0.9933, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.035400390625, |
|
"learning_rate": 1.9914792049306034e-05, |
|
"loss": 0.8865, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.032958984375, |
|
"learning_rate": 1.9912025308994146e-05, |
|
"loss": 0.9158, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.035888671875, |
|
"learning_rate": 1.990921456320859e-05, |
|
"loss": 0.9143, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.03369140625, |
|
"learning_rate": 1.9906359824427953e-05, |
|
"loss": 0.9707, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.0341796875, |
|
"learning_rate": 1.9903461105326155e-05, |
|
"loss": 0.8894, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.0341796875, |
|
"learning_rate": 1.9900518418772364e-05, |
|
"loss": 0.966, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.035888671875, |
|
"learning_rate": 1.989753177783094e-05, |
|
"loss": 0.9201, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.0341796875, |
|
"learning_rate": 1.9894501195761393e-05, |
|
"loss": 0.9299, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.03369140625, |
|
"learning_rate": 1.9891426686018308e-05, |
|
"loss": 0.8812, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.035888671875, |
|
"learning_rate": 1.9888308262251286e-05, |
|
"loss": 0.9995, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.031982421875, |
|
"learning_rate": 1.9885145938304905e-05, |
|
"loss": 0.8804, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.03955078125, |
|
"learning_rate": 1.988193972821863e-05, |
|
"loss": 0.9021, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.039306640625, |
|
"learning_rate": 1.987868964622676e-05, |
|
"loss": 0.8066, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.0361328125, |
|
"learning_rate": 1.9875395706758388e-05, |
|
"loss": 0.909, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.037109375, |
|
"learning_rate": 1.987205792443729e-05, |
|
"loss": 0.8611, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.037841796875, |
|
"learning_rate": 1.9868676314081907e-05, |
|
"loss": 0.9249, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.0322265625, |
|
"learning_rate": 1.986525089070525e-05, |
|
"loss": 0.837, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.03515625, |
|
"learning_rate": 1.986178166951484e-05, |
|
"loss": 0.8653, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.03662109375, |
|
"learning_rate": 1.9858268665912653e-05, |
|
"loss": 0.9011, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.035888671875, |
|
"learning_rate": 1.9854711895495034e-05, |
|
"loss": 0.9942, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.032958984375, |
|
"learning_rate": 1.985111137405264e-05, |
|
"loss": 0.9303, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.03369140625, |
|
"learning_rate": 1.9847467117570364e-05, |
|
"loss": 0.9206, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.033935546875, |
|
"learning_rate": 1.9843779142227258e-05, |
|
"loss": 0.8366, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.0380859375, |
|
"learning_rate": 1.9840047464396477e-05, |
|
"loss": 0.8988, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.06005859375, |
|
"learning_rate": 1.98362721006452e-05, |
|
"loss": 0.9719, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.03466796875, |
|
"learning_rate": 1.983245306773454e-05, |
|
"loss": 0.9629, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.0380859375, |
|
"learning_rate": 1.98285903826195e-05, |
|
"loss": 0.8384, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.034423828125, |
|
"learning_rate": 1.9824684062448876e-05, |
|
"loss": 0.8031, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.036376953125, |
|
"learning_rate": 1.982073412456518e-05, |
|
"loss": 0.8623, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.033935546875, |
|
"learning_rate": 1.981674058650458e-05, |
|
"loss": 0.8357, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.036376953125, |
|
"learning_rate": 1.98127034659968e-05, |
|
"loss": 0.9306, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.03564453125, |
|
"learning_rate": 1.9808622780965064e-05, |
|
"loss": 0.9464, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.033935546875, |
|
"learning_rate": 1.9804498549526e-05, |
|
"loss": 0.9146, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.034912109375, |
|
"learning_rate": 1.980033078998956e-05, |
|
"loss": 0.8999, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.03564453125, |
|
"learning_rate": 1.9796119520858957e-05, |
|
"loss": 0.9932, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.035888671875, |
|
"learning_rate": 1.9791864760830554e-05, |
|
"loss": 0.8976, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.03369140625, |
|
"learning_rate": 1.9787566528793806e-05, |
|
"loss": 0.9024, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.033447265625, |
|
"learning_rate": 1.9783224843831162e-05, |
|
"loss": 0.8262, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.036376953125, |
|
"learning_rate": 1.977883972521799e-05, |
|
"loss": 0.9491, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.0361328125, |
|
"learning_rate": 1.9774411192422486e-05, |
|
"loss": 0.9347, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.0390625, |
|
"learning_rate": 1.9769939265105573e-05, |
|
"loss": 0.8401, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.03466796875, |
|
"learning_rate": 1.976542396312085e-05, |
|
"loss": 0.8949, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.03369140625, |
|
"learning_rate": 1.976086530651447e-05, |
|
"loss": 0.8675, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.0322265625, |
|
"learning_rate": 1.975626331552507e-05, |
|
"loss": 0.8617, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.034423828125, |
|
"learning_rate": 1.9751618010583665e-05, |
|
"loss": 0.8374, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.036865234375, |
|
"learning_rate": 1.974692941231357e-05, |
|
"loss": 0.8396, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.034912109375, |
|
"learning_rate": 1.974219754153032e-05, |
|
"loss": 0.9553, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.0341796875, |
|
"learning_rate": 1.9737422419241538e-05, |
|
"loss": 0.8821, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.03466796875, |
|
"learning_rate": 1.9732604066646882e-05, |
|
"loss": 0.8778, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.03955078125, |
|
"learning_rate": 1.9727742505137936e-05, |
|
"loss": 0.8552, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.03515625, |
|
"learning_rate": 1.9722837756298112e-05, |
|
"loss": 0.9358, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.03466796875, |
|
"learning_rate": 1.9717889841902553e-05, |
|
"loss": 0.9171, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.033447265625, |
|
"learning_rate": 1.971289878391804e-05, |
|
"loss": 0.8395, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.035400390625, |
|
"learning_rate": 1.97078646045029e-05, |
|
"loss": 0.8955, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.0419921875, |
|
"learning_rate": 1.9702787326006906e-05, |
|
"loss": 0.8192, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.037353515625, |
|
"learning_rate": 1.9697666970971153e-05, |
|
"loss": 0.8264, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.03857421875, |
|
"learning_rate": 1.9692503562128004e-05, |
|
"loss": 0.9093, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.044189453125, |
|
"learning_rate": 1.9687297122400952e-05, |
|
"loss": 0.9446, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.03466796875, |
|
"learning_rate": 1.9682047674904527e-05, |
|
"loss": 0.8802, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.03564453125, |
|
"learning_rate": 1.9676755242944202e-05, |
|
"loss": 0.9152, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.032958984375, |
|
"learning_rate": 1.9671419850016283e-05, |
|
"loss": 0.8396, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.0341796875, |
|
"learning_rate": 1.9666041519807802e-05, |
|
"loss": 0.7976, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.036376953125, |
|
"learning_rate": 1.966062027619643e-05, |
|
"loss": 0.8979, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.032470703125, |
|
"learning_rate": 1.9655156143250328e-05, |
|
"loss": 0.8632, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.0390625, |
|
"learning_rate": 1.96496491452281e-05, |
|
"loss": 0.9456, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.0361328125, |
|
"learning_rate": 1.9644099306578636e-05, |
|
"loss": 0.837, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.031982421875, |
|
"learning_rate": 1.9638506651941024e-05, |
|
"loss": 0.7911, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.03564453125, |
|
"learning_rate": 1.963287120614444e-05, |
|
"loss": 0.8926, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.0341796875, |
|
"learning_rate": 1.9627192994208038e-05, |
|
"loss": 0.8054, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.034423828125, |
|
"learning_rate": 1.962147204134083e-05, |
|
"loss": 0.9226, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.035400390625, |
|
"learning_rate": 1.9615708372941588e-05, |
|
"loss": 0.8987, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.040283203125, |
|
"learning_rate": 1.960990201459872e-05, |
|
"loss": 0.8729, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.039306640625, |
|
"learning_rate": 1.960405299209016e-05, |
|
"loss": 0.9454, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.035888671875, |
|
"learning_rate": 1.9598161331383258e-05, |
|
"loss": 0.9157, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.03857421875, |
|
"learning_rate": 1.9592227058634655e-05, |
|
"loss": 0.8724, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.0361328125, |
|
"learning_rate": 1.958625020019018e-05, |
|
"loss": 0.8446, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.0341796875, |
|
"learning_rate": 1.9580230782584722e-05, |
|
"loss": 0.8441, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.037841796875, |
|
"learning_rate": 1.957416883254211e-05, |
|
"loss": 0.9078, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.037109375, |
|
"learning_rate": 1.9568064376975013e-05, |
|
"loss": 0.9075, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.036376953125, |
|
"learning_rate": 1.956191744298479e-05, |
|
"loss": 0.8932, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.03466796875, |
|
"learning_rate": 1.955572805786141e-05, |
|
"loss": 0.8577, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.03564453125, |
|
"learning_rate": 1.9549496249083288e-05, |
|
"loss": 0.8257, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.03369140625, |
|
"learning_rate": 1.954322204431719e-05, |
|
"loss": 0.7848, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.037353515625, |
|
"learning_rate": 1.953690547141811e-05, |
|
"loss": 0.8617, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.034912109375, |
|
"learning_rate": 1.953054655842913e-05, |
|
"loss": 0.7992, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.03515625, |
|
"learning_rate": 1.9524145333581315e-05, |
|
"loss": 0.8101, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.038330078125, |
|
"learning_rate": 1.951770182529357e-05, |
|
"loss": 0.8669, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.03564453125, |
|
"learning_rate": 1.951121606217252e-05, |
|
"loss": 0.8589, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.036376953125, |
|
"learning_rate": 1.9504688073012397e-05, |
|
"loss": 0.9205, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.039794921875, |
|
"learning_rate": 1.9498117886794885e-05, |
|
"loss": 0.9052, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.037109375, |
|
"learning_rate": 1.9491505532689017e-05, |
|
"loss": 0.8167, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.03662109375, |
|
"learning_rate": 1.948485104005103e-05, |
|
"loss": 0.9358, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.03759765625, |
|
"learning_rate": 1.947815443842424e-05, |
|
"loss": 0.8639, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.034423828125, |
|
"learning_rate": 1.9471415757538918e-05, |
|
"loss": 0.8684, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.032470703125, |
|
"learning_rate": 1.946463502731213e-05, |
|
"loss": 0.7762, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.034912109375, |
|
"learning_rate": 1.9457812277847645e-05, |
|
"loss": 0.8664, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.038330078125, |
|
"learning_rate": 1.945094753943577e-05, |
|
"loss": 0.9964, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.037353515625, |
|
"learning_rate": 1.944404084255324e-05, |
|
"loss": 0.8768, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.0380859375, |
|
"learning_rate": 1.9437092217863043e-05, |
|
"loss": 0.8999, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.036376953125, |
|
"learning_rate": 1.9430101696214335e-05, |
|
"loss": 0.8437, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.037841796875, |
|
"learning_rate": 1.9423069308642267e-05, |
|
"loss": 0.8273, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.04052734375, |
|
"learning_rate": 1.9415995086367858e-05, |
|
"loss": 0.9275, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.036865234375, |
|
"learning_rate": 1.940887906079786e-05, |
|
"loss": 0.8938, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.033203125, |
|
"learning_rate": 1.9401721263524616e-05, |
|
"loss": 0.8414, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.037109375, |
|
"learning_rate": 1.9394521726325907e-05, |
|
"loss": 0.9055, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.042236328125, |
|
"learning_rate": 1.938728048116484e-05, |
|
"loss": 0.9002, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.034912109375, |
|
"learning_rate": 1.9379997560189677e-05, |
|
"loss": 0.8598, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.038330078125, |
|
"learning_rate": 1.9372672995733706e-05, |
|
"loss": 0.8557, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.036376953125, |
|
"learning_rate": 1.9365306820315104e-05, |
|
"loss": 0.9001, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.037109375, |
|
"learning_rate": 1.9357899066636774e-05, |
|
"loss": 0.842, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.036865234375, |
|
"learning_rate": 1.935044976758621e-05, |
|
"loss": 0.8759, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.03759765625, |
|
"learning_rate": 1.9342958956235365e-05, |
|
"loss": 0.8306, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.03759765625, |
|
"learning_rate": 1.933542666584047e-05, |
|
"loss": 0.8322, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.03515625, |
|
"learning_rate": 1.9327852929841918e-05, |
|
"loss": 0.8149, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.03955078125, |
|
"learning_rate": 1.9320237781864106e-05, |
|
"loss": 0.8458, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.03759765625, |
|
"learning_rate": 1.9312581255715276e-05, |
|
"loss": 0.84, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.038818359375, |
|
"learning_rate": 1.9304883385387383e-05, |
|
"loss": 0.8254, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.03466796875, |
|
"learning_rate": 1.9297144205055925e-05, |
|
"loss": 0.8898, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.037353515625, |
|
"learning_rate": 1.9289363749079798e-05, |
|
"loss": 0.8231, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.041015625, |
|
"learning_rate": 1.928154205200116e-05, |
|
"loss": 0.8764, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.037353515625, |
|
"learning_rate": 1.9273679148545246e-05, |
|
"loss": 0.8436, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.037841796875, |
|
"learning_rate": 1.9265775073620244e-05, |
|
"loss": 0.8622, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.036865234375, |
|
"learning_rate": 1.9257829862317118e-05, |
|
"loss": 0.8484, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.037841796875, |
|
"learning_rate": 1.9249843549909467e-05, |
|
"loss": 0.8765, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.03759765625, |
|
"learning_rate": 1.9241816171853362e-05, |
|
"loss": 0.8762, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.03955078125, |
|
"learning_rate": 1.9233747763787187e-05, |
|
"loss": 0.8716, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.04443359375, |
|
"learning_rate": 1.9225638361531482e-05, |
|
"loss": 0.8453, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.037109375, |
|
"learning_rate": 1.9217488001088784e-05, |
|
"loss": 0.7992, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.036865234375, |
|
"learning_rate": 1.920929671864348e-05, |
|
"loss": 0.9607, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.037841796875, |
|
"learning_rate": 1.920106455056162e-05, |
|
"loss": 0.8416, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.0380859375, |
|
"learning_rate": 1.9192791533390778e-05, |
|
"loss": 0.7983, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.040283203125, |
|
"learning_rate": 1.9184477703859876e-05, |
|
"loss": 0.8942, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.037841796875, |
|
"learning_rate": 1.9176123098879035e-05, |
|
"loss": 0.8849, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.037109375, |
|
"learning_rate": 1.9167727755539393e-05, |
|
"loss": 0.83, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.036865234375, |
|
"learning_rate": 1.9159291711112962e-05, |
|
"loss": 0.7999, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.035400390625, |
|
"learning_rate": 1.9150815003052436e-05, |
|
"loss": 0.8281, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.038818359375, |
|
"learning_rate": 1.9142297668991053e-05, |
|
"loss": 0.884, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.044189453125, |
|
"learning_rate": 1.913373974674241e-05, |
|
"loss": 0.8701, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.039306640625, |
|
"learning_rate": 1.9125141274300293e-05, |
|
"loss": 0.8734, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.03857421875, |
|
"learning_rate": 1.9116502289838524e-05, |
|
"loss": 0.8851, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.044189453125, |
|
"learning_rate": 1.910782283171078e-05, |
|
"loss": 0.9402, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.038818359375, |
|
"learning_rate": 1.909910293845042e-05, |
|
"loss": 0.831, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.038330078125, |
|
"learning_rate": 1.909034264877032e-05, |
|
"loss": 0.8093, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.039794921875, |
|
"learning_rate": 1.9081542001562713e-05, |
|
"loss": 0.9085, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.038330078125, |
|
"learning_rate": 1.9072701035898985e-05, |
|
"loss": 0.8466, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.0419921875, |
|
"learning_rate": 1.906381979102953e-05, |
|
"loss": 0.8938, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.0400390625, |
|
"learning_rate": 1.9054898306383568e-05, |
|
"loss": 0.8787, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.040283203125, |
|
"learning_rate": 1.904593662156896e-05, |
|
"loss": 0.882, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.03759765625, |
|
"learning_rate": 1.903693477637204e-05, |
|
"loss": 0.7803, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.037353515625, |
|
"learning_rate": 1.902789281075745e-05, |
|
"loss": 0.8078, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.03857421875, |
|
"learning_rate": 1.9018810764867935e-05, |
|
"loss": 0.8318, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.04150390625, |
|
"learning_rate": 1.900968867902419e-05, |
|
"loss": 0.8728, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.0390625, |
|
"learning_rate": 1.9000526593724678e-05, |
|
"loss": 0.836, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.0380859375, |
|
"learning_rate": 1.8991324549645424e-05, |
|
"loss": 0.9197, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.04150390625, |
|
"learning_rate": 1.898208258763987e-05, |
|
"loss": 0.7965, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.041015625, |
|
"learning_rate": 1.897280074873868e-05, |
|
"loss": 0.8078, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.042236328125, |
|
"learning_rate": 1.8963479074149537e-05, |
|
"loss": 0.9035, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.040771484375, |
|
"learning_rate": 1.8954117605257e-05, |
|
"loss": 0.8515, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.041015625, |
|
"learning_rate": 1.8944716383622288e-05, |
|
"loss": 0.8147, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.040771484375, |
|
"learning_rate": 1.8935275450983102e-05, |
|
"loss": 0.8121, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.04150390625, |
|
"learning_rate": 1.8925794849253462e-05, |
|
"loss": 0.858, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.044921875, |
|
"learning_rate": 1.8916274620523482e-05, |
|
"loss": 0.8502, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.04150390625, |
|
"learning_rate": 1.8906714807059218e-05, |
|
"loss": 0.8438, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.0390625, |
|
"learning_rate": 1.889711545130246e-05, |
|
"loss": 0.8464, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.05712890625, |
|
"learning_rate": 1.8887476595870558e-05, |
|
"loss": 0.8227, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.036865234375, |
|
"learning_rate": 1.887779828355621e-05, |
|
"loss": 0.8546, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.037109375, |
|
"learning_rate": 1.8868080557327305e-05, |
|
"loss": 0.8932, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.041748046875, |
|
"learning_rate": 1.8858323460326704e-05, |
|
"loss": 0.889, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.039306640625, |
|
"learning_rate": 1.8848527035872057e-05, |
|
"loss": 0.8174, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.038818359375, |
|
"learning_rate": 1.883869132745561e-05, |
|
"loss": 0.8183, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.0390625, |
|
"learning_rate": 1.8828816378744035e-05, |
|
"loss": 0.8924, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.038330078125, |
|
"learning_rate": 1.8818902233578188e-05, |
|
"loss": 0.7906, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.04248046875, |
|
"learning_rate": 1.8808948935972965e-05, |
|
"loss": 0.8118, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.03857421875, |
|
"learning_rate": 1.8798956530117058e-05, |
|
"loss": 0.8512, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.044677734375, |
|
"learning_rate": 1.8788925060372806e-05, |
|
"loss": 0.8224, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.047607421875, |
|
"learning_rate": 1.8778854571275972e-05, |
|
"loss": 0.8207, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.038330078125, |
|
"learning_rate": 1.876874510753554e-05, |
|
"loss": 0.8011, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.04296875, |
|
"learning_rate": 1.875859671403354e-05, |
|
"loss": 0.8132, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.042236328125, |
|
"learning_rate": 1.874840943582482e-05, |
|
"loss": 0.9056, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.0400390625, |
|
"learning_rate": 1.8738183318136867e-05, |
|
"loss": 0.8353, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.040771484375, |
|
"learning_rate": 1.872791840636961e-05, |
|
"loss": 0.7943, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.045654296875, |
|
"learning_rate": 1.871761474609519e-05, |
|
"loss": 0.8207, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.04345703125, |
|
"learning_rate": 1.8707272383057785e-05, |
|
"loss": 0.8415, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.04052734375, |
|
"learning_rate": 1.8696891363173405e-05, |
|
"loss": 0.797, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.046142578125, |
|
"learning_rate": 1.8686471732529667e-05, |
|
"loss": 0.8248, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.041259765625, |
|
"learning_rate": 1.8676013537385614e-05, |
|
"loss": 0.76, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.04150390625, |
|
"learning_rate": 1.8665516824171497e-05, |
|
"loss": 0.8362, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.040771484375, |
|
"learning_rate": 1.865498163948858e-05, |
|
"loss": 0.8093, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.0380859375, |
|
"learning_rate": 1.864440803010891e-05, |
|
"loss": 0.7735, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.041015625, |
|
"learning_rate": 1.863379604297513e-05, |
|
"loss": 0.8824, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.039794921875, |
|
"learning_rate": 1.862314572520028e-05, |
|
"loss": 0.8157, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.03857421875, |
|
"learning_rate": 1.861245712406755e-05, |
|
"loss": 0.8084, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.86017302870301e-05, |
|
"loss": 0.7976, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.041259765625, |
|
"learning_rate": 1.8590965261710856e-05, |
|
"loss": 0.8406, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.041015625, |
|
"learning_rate": 1.858016209590227e-05, |
|
"loss": 0.8145, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.038818359375, |
|
"learning_rate": 1.8569320837566128e-05, |
|
"loss": 0.8142, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.04052734375, |
|
"learning_rate": 1.8558441534833327e-05, |
|
"loss": 0.8894, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.04296875, |
|
"learning_rate": 1.8547524236003675e-05, |
|
"loss": 0.8793, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.0380859375, |
|
"learning_rate": 1.8536568989545662e-05, |
|
"loss": 0.868, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.0419921875, |
|
"learning_rate": 1.8525575844096243e-05, |
|
"loss": 0.8572, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.04931640625, |
|
"learning_rate": 1.8514544848460653e-05, |
|
"loss": 0.7933, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.0390625, |
|
"learning_rate": 1.8503476051612138e-05, |
|
"loss": 0.8017, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.042724609375, |
|
"learning_rate": 1.8492369502691785e-05, |
|
"loss": 0.8317, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.04052734375, |
|
"learning_rate": 1.8481225251008284e-05, |
|
"loss": 0.8201, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.041748046875, |
|
"learning_rate": 1.8470043346037698e-05, |
|
"loss": 0.8258, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.0419921875, |
|
"learning_rate": 1.8458823837423274e-05, |
|
"loss": 0.8402, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.044921875, |
|
"learning_rate": 1.8447566774975187e-05, |
|
"loss": 0.9293, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.048583984375, |
|
"learning_rate": 1.8436272208670346e-05, |
|
"loss": 0.8716, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.0458984375, |
|
"learning_rate": 1.842494018865216e-05, |
|
"loss": 0.8868, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.047607421875, |
|
"learning_rate": 1.841357076523032e-05, |
|
"loss": 0.9027, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.044189453125, |
|
"learning_rate": 1.840216398888057e-05, |
|
"loss": 0.7936, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.0458984375, |
|
"learning_rate": 1.8390719910244487e-05, |
|
"loss": 0.8498, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.044677734375, |
|
"learning_rate": 1.8379238580129256e-05, |
|
"loss": 0.798, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.044677734375, |
|
"learning_rate": 1.836772004950744e-05, |
|
"loss": 0.8746, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.04541015625, |
|
"learning_rate": 1.8356164369516772e-05, |
|
"loss": 0.8658, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.0400390625, |
|
"learning_rate": 1.834457159145989e-05, |
|
"loss": 0.8299, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.0458984375, |
|
"learning_rate": 1.8332941766804152e-05, |
|
"loss": 0.8723, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.04150390625, |
|
"learning_rate": 1.832127494718138e-05, |
|
"loss": 0.8311, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.0439453125, |
|
"learning_rate": 1.830957118438764e-05, |
|
"loss": 0.8159, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.044921875, |
|
"learning_rate": 1.829783053038301e-05, |
|
"loss": 0.8351, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.0419921875, |
|
"learning_rate": 1.8286053037291356e-05, |
|
"loss": 0.7679, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.04345703125, |
|
"learning_rate": 1.8274238757400096e-05, |
|
"loss": 0.7848, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.04150390625, |
|
"learning_rate": 1.826238774315995e-05, |
|
"loss": 0.8741, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.042236328125, |
|
"learning_rate": 1.8250500047184744e-05, |
|
"loss": 0.8517, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.046875, |
|
"learning_rate": 1.8238575722251144e-05, |
|
"loss": 0.8602, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.041259765625, |
|
"learning_rate": 1.8226614821298444e-05, |
|
"loss": 0.8087, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.042724609375, |
|
"learning_rate": 1.821461739742831e-05, |
|
"loss": 0.8301, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.046875, |
|
"learning_rate": 1.820258350390456e-05, |
|
"loss": 0.8342, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.043701171875, |
|
"learning_rate": 1.819051319415293e-05, |
|
"loss": 0.8249, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.041748046875, |
|
"learning_rate": 1.817840652176082e-05, |
|
"loss": 0.7909, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.04248046875, |
|
"learning_rate": 1.8166263540477068e-05, |
|
"loss": 0.8071, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.043212890625, |
|
"learning_rate": 1.815408430421171e-05, |
|
"loss": 0.7983, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.041748046875, |
|
"learning_rate": 1.8141868867035745e-05, |
|
"loss": 0.7877, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.04443359375, |
|
"learning_rate": 1.8129617283180878e-05, |
|
"loss": 0.9056, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.043212890625, |
|
"learning_rate": 1.81173296070393e-05, |
|
"loss": 0.8708, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.04541015625, |
|
"learning_rate": 1.8105005893163436e-05, |
|
"loss": 0.8387, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.042724609375, |
|
"learning_rate": 1.8092646196265705e-05, |
|
"loss": 0.8578, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.042236328125, |
|
"learning_rate": 1.808025057121827e-05, |
|
"loss": 0.8642, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 1.8067819073052813e-05, |
|
"loss": 0.8058, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.041259765625, |
|
"learning_rate": 1.8055351756960262e-05, |
|
"loss": 0.8128, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.04296875, |
|
"learning_rate": 1.804284867829058e-05, |
|
"loss": 0.8387, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.0458984375, |
|
"learning_rate": 1.8030309892552488e-05, |
|
"loss": 0.9106, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.041259765625, |
|
"learning_rate": 1.801773545541324e-05, |
|
"loss": 0.752, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.044189453125, |
|
"learning_rate": 1.800512542269836e-05, |
|
"loss": 0.881, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.045166015625, |
|
"learning_rate": 1.7992479850391416e-05, |
|
"loss": 0.8004, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.046875, |
|
"learning_rate": 1.797979879463375e-05, |
|
"loss": 0.8075, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.044677734375, |
|
"learning_rate": 1.796708231172423e-05, |
|
"loss": 0.8315, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.041259765625, |
|
"learning_rate": 1.795433045811901e-05, |
|
"loss": 0.8506, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.7941543290431286e-05, |
|
"loss": 0.8314, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.045166015625, |
|
"learning_rate": 1.792872086543103e-05, |
|
"loss": 0.7697, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.04541015625, |
|
"learning_rate": 1.7915863240044727e-05, |
|
"loss": 0.9001, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.04443359375, |
|
"learning_rate": 1.7902970471355162e-05, |
|
"loss": 0.7685, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.0458984375, |
|
"learning_rate": 1.7890042616601125e-05, |
|
"loss": 0.8105, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.044677734375, |
|
"learning_rate": 1.7877079733177185e-05, |
|
"loss": 0.9061, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.043212890625, |
|
"learning_rate": 1.7864081878633414e-05, |
|
"loss": 0.813, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.04345703125, |
|
"learning_rate": 1.785104911067515e-05, |
|
"loss": 0.8197, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.044189453125, |
|
"learning_rate": 1.783798148716273e-05, |
|
"loss": 0.894, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.04296875, |
|
"learning_rate": 1.782487906611124e-05, |
|
"loss": 0.7809, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.04296875, |
|
"learning_rate": 1.781174190569024e-05, |
|
"loss": 0.8428, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.7798570064223536e-05, |
|
"loss": 0.8276, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.04541015625, |
|
"learning_rate": 1.7785363600188894e-05, |
|
"loss": 0.7937, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.03955078125, |
|
"learning_rate": 1.7772122572217796e-05, |
|
"loss": 0.7835, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 1.7758847039095167e-05, |
|
"loss": 0.8456, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.0439453125, |
|
"learning_rate": 1.774553705975913e-05, |
|
"loss": 0.8483, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.041259765625, |
|
"learning_rate": 1.773219269330073e-05, |
|
"loss": 0.7902, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.04296875, |
|
"learning_rate": 1.7718813998963678e-05, |
|
"loss": 0.8734, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.044921875, |
|
"learning_rate": 1.7705401036144086e-05, |
|
"loss": 0.8646, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.04345703125, |
|
"learning_rate": 1.7691953864390208e-05, |
|
"loss": 0.8005, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.0419921875, |
|
"learning_rate": 1.7678472543402166e-05, |
|
"loss": 0.8701, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.045166015625, |
|
"learning_rate": 1.7664957133031705e-05, |
|
"loss": 0.8099, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.057373046875, |
|
"learning_rate": 1.7651407693281896e-05, |
|
"loss": 0.8524, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.05224609375, |
|
"learning_rate": 1.7637824284306898e-05, |
|
"loss": 0.8456, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.05078125, |
|
"learning_rate": 1.762420696641167e-05, |
|
"loss": 0.7977, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.044189453125, |
|
"learning_rate": 1.7610555800051727e-05, |
|
"loss": 0.7834, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.045166015625, |
|
"learning_rate": 1.759687084583285e-05, |
|
"loss": 0.7946, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.04443359375, |
|
"learning_rate": 1.7583152164510827e-05, |
|
"loss": 0.7456, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.044677734375, |
|
"learning_rate": 1.7569399816991174e-05, |
|
"loss": 0.8358, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.047119140625, |
|
"learning_rate": 1.7555613864328876e-05, |
|
"loss": 0.7976, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.09423828125, |
|
"learning_rate": 1.754179436772812e-05, |
|
"loss": 0.9486, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.046142578125, |
|
"learning_rate": 1.7527941388542006e-05, |
|
"loss": 0.7898, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.04150390625, |
|
"learning_rate": 1.751405498827228e-05, |
|
"loss": 0.7644, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.04931640625, |
|
"learning_rate": 1.7500135228569067e-05, |
|
"loss": 0.8363, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.047119140625, |
|
"learning_rate": 1.748618217123061e-05, |
|
"loss": 0.801, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.044677734375, |
|
"learning_rate": 1.7472195878202955e-05, |
|
"loss": 0.8487, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.046142578125, |
|
"learning_rate": 1.7458176411579715e-05, |
|
"loss": 0.8884, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.0439453125, |
|
"learning_rate": 1.7444123833601784e-05, |
|
"loss": 0.8484, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.043701171875, |
|
"learning_rate": 1.743003820665705e-05, |
|
"loss": 0.8325, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 1.741591959328013e-05, |
|
"loss": 0.8061, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.047607421875, |
|
"learning_rate": 1.7401768056152083e-05, |
|
"loss": 0.7888, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.047119140625, |
|
"learning_rate": 1.7387583658100144e-05, |
|
"loss": 0.8564, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.044677734375, |
|
"learning_rate": 1.737336646209742e-05, |
|
"loss": 0.8412, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.0439453125, |
|
"learning_rate": 1.7359116531262654e-05, |
|
"loss": 0.9182, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.047119140625, |
|
"learning_rate": 1.73448339288599e-05, |
|
"loss": 0.8653, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.0478515625, |
|
"learning_rate": 1.7330518718298263e-05, |
|
"loss": 0.8174, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.05859375, |
|
"learning_rate": 1.7316170963131627e-05, |
|
"loss": 0.8621, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.047119140625, |
|
"learning_rate": 1.7301790727058344e-05, |
|
"loss": 0.7991, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.046142578125, |
|
"learning_rate": 1.728737807392098e-05, |
|
"loss": 0.8706, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.046142578125, |
|
"learning_rate": 1.727293306770602e-05, |
|
"loss": 0.824, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.0498046875, |
|
"learning_rate": 1.7258455772543573e-05, |
|
"loss": 0.9865, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 1.7243946252707115e-05, |
|
"loss": 0.844, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.04052734375, |
|
"learning_rate": 1.7229404572613174e-05, |
|
"loss": 0.7566, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.0439453125, |
|
"learning_rate": 1.721483079682106e-05, |
|
"loss": 0.8393, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.0458984375, |
|
"learning_rate": 1.7200224990032577e-05, |
|
"loss": 0.7992, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.04443359375, |
|
"learning_rate": 1.7185587217091727e-05, |
|
"loss": 0.8862, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.046630859375, |
|
"learning_rate": 1.7170917542984445e-05, |
|
"loss": 0.8859, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.7156216032838275e-05, |
|
"loss": 0.8738, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.047119140625, |
|
"learning_rate": 1.7141482751922117e-05, |
|
"loss": 0.8702, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.044677734375, |
|
"learning_rate": 1.7126717765645908e-05, |
|
"loss": 0.8496, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.047607421875, |
|
"learning_rate": 1.7111921139560356e-05, |
|
"loss": 0.8402, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.044677734375, |
|
"learning_rate": 1.7097092939356622e-05, |
|
"loss": 0.8719, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.05126953125, |
|
"learning_rate": 1.7082233230866064e-05, |
|
"loss": 0.865, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.7067342080059904e-05, |
|
"loss": 0.8876, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.0478515625, |
|
"learning_rate": 1.7052419553048965e-05, |
|
"loss": 0.8594, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.041015625, |
|
"learning_rate": 1.703746571608337e-05, |
|
"loss": 0.7774, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.04638671875, |
|
"learning_rate": 1.7022480635552243e-05, |
|
"loss": 0.8357, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.044189453125, |
|
"learning_rate": 1.700746437798342e-05, |
|
"loss": 0.8365, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.6992417010043144e-05, |
|
"loss": 0.7916, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.0439453125, |
|
"learning_rate": 1.6977338598535776e-05, |
|
"loss": 0.886, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.04638671875, |
|
"learning_rate": 1.696222921040351e-05, |
|
"loss": 0.8391, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.045166015625, |
|
"learning_rate": 1.6947088912726054e-05, |
|
"loss": 0.8403, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.046875, |
|
"learning_rate": 1.693191777272034e-05, |
|
"loss": 0.8048, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.6916715857740234e-05, |
|
"loss": 0.7742, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.045166015625, |
|
"learning_rate": 1.690148323527623e-05, |
|
"loss": 0.7859, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.045166015625, |
|
"learning_rate": 1.688621997295515e-05, |
|
"loss": 0.7956, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.04443359375, |
|
"learning_rate": 1.6870926138539837e-05, |
|
"loss": 0.8672, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.060546875, |
|
"learning_rate": 1.6855601799928877e-05, |
|
"loss": 0.848, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.046875, |
|
"learning_rate": 1.6840247025156272e-05, |
|
"loss": 0.8125, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.052001953125, |
|
"learning_rate": 1.6824861882391154e-05, |
|
"loss": 0.8359, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.048583984375, |
|
"learning_rate": 1.6809446439937472e-05, |
|
"loss": 0.877, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 1.6794000766233697e-05, |
|
"loss": 0.8408, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 1.6778524929852513e-05, |
|
"loss": 0.8381, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.052490234375, |
|
"learning_rate": 1.676301899950052e-05, |
|
"loss": 0.782, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.04345703125, |
|
"learning_rate": 1.674748304401791e-05, |
|
"loss": 0.8621, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.04345703125, |
|
"learning_rate": 1.673191713237819e-05, |
|
"loss": 0.8012, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.05322265625, |
|
"learning_rate": 1.671632133368785e-05, |
|
"loss": 0.8245, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.0478515625, |
|
"learning_rate": 1.670069571718607e-05, |
|
"loss": 0.7882, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.045166015625, |
|
"learning_rate": 1.6685040352244414e-05, |
|
"loss": 0.8387, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.055419921875, |
|
"learning_rate": 1.666935530836651e-05, |
|
"loss": 0.7766, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.04931640625, |
|
"learning_rate": 1.665364065518775e-05, |
|
"loss": 0.8204, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.046630859375, |
|
"learning_rate": 1.6637896462474986e-05, |
|
"loss": 0.8133, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.048828125, |
|
"learning_rate": 1.662212280012621e-05, |
|
"loss": 0.85, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.045654296875, |
|
"learning_rate": 1.660631973817024e-05, |
|
"loss": 0.8247, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.048583984375, |
|
"learning_rate": 1.6590487346766426e-05, |
|
"loss": 0.8977, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.05712890625, |
|
"learning_rate": 1.657462569620433e-05, |
|
"loss": 0.8456, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.048583984375, |
|
"learning_rate": 1.6558734856903406e-05, |
|
"loss": 0.8369, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.04638671875, |
|
"learning_rate": 1.6542814899412694e-05, |
|
"loss": 0.8055, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.04443359375, |
|
"learning_rate": 1.6526865894410526e-05, |
|
"loss": 0.8358, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.046630859375, |
|
"learning_rate": 1.651088791270416e-05, |
|
"loss": 0.8094, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 1.6494881025229535e-05, |
|
"loss": 0.8518, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.048583984375, |
|
"learning_rate": 1.647884530305089e-05, |
|
"loss": 0.9644, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.046630859375, |
|
"learning_rate": 1.6462780817360502e-05, |
|
"loss": 0.8415, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.644668763947833e-05, |
|
"loss": 0.8764, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.044921875, |
|
"learning_rate": 1.6430565840851723e-05, |
|
"loss": 0.7737, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.047607421875, |
|
"learning_rate": 1.641441549305509e-05, |
|
"loss": 0.7559, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.044677734375, |
|
"learning_rate": 1.6398236667789595e-05, |
|
"loss": 0.7893, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.045166015625, |
|
"learning_rate": 1.6382029436882826e-05, |
|
"loss": 0.8285, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.05419921875, |
|
"learning_rate": 1.636579387228848e-05, |
|
"loss": 0.9, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.052978515625, |
|
"learning_rate": 1.634953004608604e-05, |
|
"loss": 0.9457, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.044921875, |
|
"learning_rate": 1.6333238030480473e-05, |
|
"loss": 0.8015, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.056884765625, |
|
"learning_rate": 1.631691789780188e-05, |
|
"loss": 0.8978, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.04443359375, |
|
"learning_rate": 1.6300569720505198e-05, |
|
"loss": 0.8787, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.047119140625, |
|
"learning_rate": 1.6284193571169878e-05, |
|
"loss": 0.8, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.0498046875, |
|
"learning_rate": 1.6267789522499545e-05, |
|
"loss": 0.7745, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.6251357647321685e-05, |
|
"loss": 0.8191, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.046142578125, |
|
"learning_rate": 1.6234898018587336e-05, |
|
"loss": 0.7503, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 1.6218410709370735e-05, |
|
"loss": 0.8587, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.6201895792869023e-05, |
|
"loss": 0.8287, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.6185353342401896e-05, |
|
"loss": 0.8017, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 1.6168783431411295e-05, |
|
"loss": 0.8638, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.0498046875, |
|
"learning_rate": 1.6152186133461075e-05, |
|
"loss": 0.8111, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.0458984375, |
|
"learning_rate": 1.6135561522236675e-05, |
|
"loss": 0.7919, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.05078125, |
|
"learning_rate": 1.6118909671544797e-05, |
|
"loss": 0.8731, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.6102230655313076e-05, |
|
"loss": 0.9006, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.044921875, |
|
"learning_rate": 1.6085524547589747e-05, |
|
"loss": 0.7621, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.6068791422543327e-05, |
|
"loss": 0.9065, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.6052031354462275e-05, |
|
"loss": 0.8989, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.047119140625, |
|
"learning_rate": 1.6035244417754666e-05, |
|
"loss": 0.82, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 1.6018430686947865e-05, |
|
"loss": 0.8926, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.047607421875, |
|
"learning_rate": 1.6001590236688187e-05, |
|
"loss": 0.8628, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.5984723141740578e-05, |
|
"loss": 0.8583, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 1.596782947698826e-05, |
|
"loss": 0.8314, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.058837890625, |
|
"learning_rate": 1.5950909317432436e-05, |
|
"loss": 0.8222, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.0458984375, |
|
"learning_rate": 1.593396273819192e-05, |
|
"loss": 0.8258, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.048583984375, |
|
"learning_rate": 1.591698981450283e-05, |
|
"loss": 0.8246, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.046142578125, |
|
"learning_rate": 1.5899990621718232e-05, |
|
"loss": 0.868, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.046630859375, |
|
"learning_rate": 1.588296523530782e-05, |
|
"loss": 0.835, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.04736328125, |
|
"learning_rate": 1.5865913730857583e-05, |
|
"loss": 0.8298, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 1.584883618406946e-05, |
|
"loss": 0.8405, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.5831732670761e-05, |
|
"loss": 0.8951, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.044921875, |
|
"learning_rate": 1.5814603266865046e-05, |
|
"loss": 0.8396, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.04736328125, |
|
"learning_rate": 1.5797448048429377e-05, |
|
"loss": 0.8433, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.046142578125, |
|
"learning_rate": 1.5780267091616383e-05, |
|
"loss": 0.869, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 1.576306047270272e-05, |
|
"loss": 0.8454, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.0478515625, |
|
"learning_rate": 1.574582826807897e-05, |
|
"loss": 0.7873, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.056884765625, |
|
"learning_rate": 1.5728570554249312e-05, |
|
"loss": 0.9116, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.0546875, |
|
"learning_rate": 1.571128740783117e-05, |
|
"loss": 0.8564, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 1.5693978905554886e-05, |
|
"loss": 0.8302, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.567664512426336e-05, |
|
"loss": 0.8041, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 1.5659286140911733e-05, |
|
"loss": 0.8421, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.05859375, |
|
"learning_rate": 1.5641902032567023e-05, |
|
"loss": 0.9291, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.05712890625, |
|
"learning_rate": 1.562449287640781e-05, |
|
"loss": 0.8886, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.052001953125, |
|
"learning_rate": 1.560705874972385e-05, |
|
"loss": 0.8472, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.5589599729915783e-05, |
|
"loss": 0.8108, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.5572115894494752e-05, |
|
"loss": 0.868, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 1.5554607321082077e-05, |
|
"loss": 0.8719, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.5537074087408894e-05, |
|
"loss": 0.8289, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.05615234375, |
|
"learning_rate": 1.5519516271315834e-05, |
|
"loss": 0.8315, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.5501933950752655e-05, |
|
"loss": 0.7429, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.0556640625, |
|
"learning_rate": 1.5484327203777917e-05, |
|
"loss": 0.7782, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.044189453125, |
|
"learning_rate": 1.5466696108558614e-05, |
|
"loss": 0.8454, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.544904074336983e-05, |
|
"loss": 0.7808, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.04736328125, |
|
"learning_rate": 1.5431361186594415e-05, |
|
"loss": 0.7698, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.044921875, |
|
"learning_rate": 1.5413657516722607e-05, |
|
"loss": 0.8511, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.0498046875, |
|
"learning_rate": 1.53959298123517e-05, |
|
"loss": 0.8284, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.054443359375, |
|
"learning_rate": 1.5378178152185703e-05, |
|
"loss": 0.852, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.05126953125, |
|
"learning_rate": 1.5360402615034958e-05, |
|
"loss": 0.8806, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.05224609375, |
|
"learning_rate": 1.5342603279815826e-05, |
|
"loss": 0.778, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.05126953125, |
|
"learning_rate": 1.5324780225550316e-05, |
|
"loss": 0.8209, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.5306933531365748e-05, |
|
"loss": 0.8506, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.5289063276494384e-05, |
|
"loss": 0.8609, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.5271169540273093e-05, |
|
"loss": 0.848, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.049560546875, |
|
"learning_rate": 1.5253252402142989e-05, |
|
"loss": 0.7898, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.049560546875, |
|
"learning_rate": 1.5235311941649085e-05, |
|
"loss": 0.845, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.052490234375, |
|
"learning_rate": 1.5217348238439922e-05, |
|
"loss": 0.8216, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 1.5199361372267252e-05, |
|
"loss": 0.8517, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.5181351422985646e-05, |
|
"loss": 0.8148, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.516331847055216e-05, |
|
"loss": 0.8649, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.046142578125, |
|
"learning_rate": 1.514526259502597e-05, |
|
"loss": 0.8321, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.048583984375, |
|
"learning_rate": 1.5127183876568024e-05, |
|
"loss": 0.8175, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.04931640625, |
|
"learning_rate": 1.5109082395440689e-05, |
|
"loss": 0.7704, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.0595703125, |
|
"learning_rate": 1.5090958232007383e-05, |
|
"loss": 0.9095, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.049560546875, |
|
"learning_rate": 1.507281146673223e-05, |
|
"loss": 0.8498, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 1.5054642180179684e-05, |
|
"loss": 0.8709, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.5036450453014202e-05, |
|
"loss": 0.8439, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.048583984375, |
|
"learning_rate": 1.5018236365999862e-05, |
|
"loss": 0.9223, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.045654296875, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 0.8336, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.4981741435976882e-05, |
|
"loss": 0.8523, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.048583984375, |
|
"learning_rate": 1.4963460754991309e-05, |
|
"loss": 0.8496, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.0478515625, |
|
"learning_rate": 1.4945158038202274e-05, |
|
"loss": 0.8392, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 1.4926833366866611e-05, |
|
"loss": 0.7421, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.052734375, |
|
"learning_rate": 1.4908486822338611e-05, |
|
"loss": 0.8284, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.048583984375, |
|
"learning_rate": 1.489011848606968e-05, |
|
"loss": 0.8383, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.4871728439607967e-05, |
|
"loss": 0.8099, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.047607421875, |
|
"learning_rate": 1.4853316764598011e-05, |
|
"loss": 0.9332, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.046875, |
|
"learning_rate": 1.4834883542780367e-05, |
|
"loss": 0.8748, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 1.4816428855991257e-05, |
|
"loss": 0.7789, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.4797952786162188e-05, |
|
"loss": 0.8209, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.4779455415319612e-05, |
|
"loss": 0.8381, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.4760936825584535e-05, |
|
"loss": 0.7684, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.048828125, |
|
"learning_rate": 1.4742397099172183e-05, |
|
"loss": 0.8275, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.05224609375, |
|
"learning_rate": 1.4723836318391607e-05, |
|
"loss": 0.9173, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.05322265625, |
|
"learning_rate": 1.4705254565645335e-05, |
|
"loss": 0.7534, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.052001953125, |
|
"learning_rate": 1.4686651923429002e-05, |
|
"loss": 0.7822, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.04931640625, |
|
"learning_rate": 1.4668028474330989e-05, |
|
"loss": 0.8575, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.052978515625, |
|
"learning_rate": 1.4649384301032044e-05, |
|
"loss": 0.8385, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.052490234375, |
|
"learning_rate": 1.4630719486304928e-05, |
|
"loss": 0.8523, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.4612034113014036e-05, |
|
"loss": 0.8569, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.4593328264115044e-05, |
|
"loss": 0.7559, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.052490234375, |
|
"learning_rate": 1.4574602022654516e-05, |
|
"loss": 0.822, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 1.4555855471769572e-05, |
|
"loss": 0.7773, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.047607421875, |
|
"learning_rate": 1.4537088694687476e-05, |
|
"loss": 0.8264, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 1.4518301774725308e-05, |
|
"loss": 0.8323, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.0576171875, |
|
"learning_rate": 1.4499494795289562e-05, |
|
"loss": 0.8173, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 1.4480667839875786e-05, |
|
"loss": 0.8007, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.046630859375, |
|
"learning_rate": 1.4461820992068224e-05, |
|
"loss": 0.8152, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.049560546875, |
|
"learning_rate": 1.4442954335539432e-05, |
|
"loss": 0.8258, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.046875, |
|
"learning_rate": 1.4424067954049903e-05, |
|
"loss": 0.8003, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.046875, |
|
"learning_rate": 1.4405161931447702e-05, |
|
"loss": 0.7678, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.4386236351668095e-05, |
|
"loss": 0.8295, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.06298828125, |
|
"learning_rate": 1.436729129873318e-05, |
|
"loss": 0.8286, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.05322265625, |
|
"learning_rate": 1.4348326856751496e-05, |
|
"loss": 0.827, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.046142578125, |
|
"learning_rate": 1.4329343109917671e-05, |
|
"loss": 0.7671, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.431034014251203e-05, |
|
"loss": 0.8195, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.4291318038900243e-05, |
|
"loss": 0.8331, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.4272276883532927e-05, |
|
"loss": 0.8458, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.05712890625, |
|
"learning_rate": 1.4253216760945284e-05, |
|
"loss": 0.8311, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.054931640625, |
|
"learning_rate": 1.423413775575672e-05, |
|
"loss": 0.8339, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.4215039952670482e-05, |
|
"loss": 0.811, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.060546875, |
|
"learning_rate": 1.4195923436473257e-05, |
|
"loss": 0.8431, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 1.4176788292034824e-05, |
|
"loss": 0.7789, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.052734375, |
|
"learning_rate": 1.4157634604307661e-05, |
|
"loss": 0.8773, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.05126953125, |
|
"learning_rate": 1.413846245832657e-05, |
|
"loss": 0.8304, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.052001953125, |
|
"learning_rate": 1.411927193920829e-05, |
|
"loss": 0.8578, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.4100063132151148e-05, |
|
"loss": 0.7993, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.052001953125, |
|
"learning_rate": 1.408083612243465e-05, |
|
"loss": 0.8845, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.4061590995419118e-05, |
|
"loss": 0.8415, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.404232783654531e-05, |
|
"loss": 0.7521, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.04931640625, |
|
"learning_rate": 1.402304673133403e-05, |
|
"loss": 0.8167, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 1.4003747765385767e-05, |
|
"loss": 0.7807, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.0498046875, |
|
"learning_rate": 1.3984431024380301e-05, |
|
"loss": 0.8049, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.053466796875, |
|
"learning_rate": 1.3965096594076322e-05, |
|
"loss": 0.8303, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 1.3945744560311056e-05, |
|
"loss": 0.8684, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.053466796875, |
|
"learning_rate": 1.3926375008999887e-05, |
|
"loss": 0.7436, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.3906988026135957e-05, |
|
"loss": 0.8579, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.04736328125, |
|
"learning_rate": 1.3887583697789815e-05, |
|
"loss": 0.7671, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.3868162110109001e-05, |
|
"loss": 0.8025, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.054443359375, |
|
"learning_rate": 1.3848723349317688e-05, |
|
"loss": 0.8687, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.053955078125, |
|
"learning_rate": 1.382926750171629e-05, |
|
"loss": 0.8175, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.05078125, |
|
"learning_rate": 1.3809794653681075e-05, |
|
"loss": 0.8536, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.0498046875, |
|
"learning_rate": 1.3790304891663793e-05, |
|
"loss": 0.8289, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 1.3770798302191279e-05, |
|
"loss": 0.7827, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.3751274971865086e-05, |
|
"loss": 0.8537, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 1.3731734987361069e-05, |
|
"loss": 0.8194, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 1.3712178435429044e-05, |
|
"loss": 0.7846, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.0478515625, |
|
"learning_rate": 1.3692605402892369e-05, |
|
"loss": 0.8813, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.0478515625, |
|
"learning_rate": 1.367301597664757e-05, |
|
"loss": 0.7698, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 1.3653410243663953e-05, |
|
"loss": 0.7923, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.0478515625, |
|
"learning_rate": 1.3633788290983221e-05, |
|
"loss": 0.7843, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.3614150205719086e-05, |
|
"loss": 0.8806, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.046142578125, |
|
"learning_rate": 1.3594496075056886e-05, |
|
"loss": 0.7934, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.3574825986253191e-05, |
|
"loss": 0.8547, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 1.3555140026635415e-05, |
|
"loss": 0.8556, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.04931640625, |
|
"learning_rate": 1.3535438283601437e-05, |
|
"loss": 0.8136, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.3515720844619206e-05, |
|
"loss": 0.7409, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.046875, |
|
"learning_rate": 1.3495987797226362e-05, |
|
"loss": 0.7693, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.05322265625, |
|
"learning_rate": 1.3476239229029826e-05, |
|
"loss": 0.8478, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.05859375, |
|
"learning_rate": 1.3456475227705442e-05, |
|
"loss": 0.8639, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.059326171875, |
|
"learning_rate": 1.3436695880997551e-05, |
|
"loss": 0.8093, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 1.3416901276718643e-05, |
|
"loss": 0.7741, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.0615234375, |
|
"learning_rate": 1.339709150274893e-05, |
|
"loss": 0.8319, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.052490234375, |
|
"learning_rate": 1.3377266647035977e-05, |
|
"loss": 0.8408, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.052734375, |
|
"learning_rate": 1.3357426797594309e-05, |
|
"loss": 0.7784, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.05712890625, |
|
"learning_rate": 1.3337572042505007e-05, |
|
"loss": 0.7569, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.052490234375, |
|
"learning_rate": 1.331770246991534e-05, |
|
"loss": 0.8598, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.053466796875, |
|
"learning_rate": 1.3297818168038353e-05, |
|
"loss": 0.8132, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.3277919225152486e-05, |
|
"loss": 0.8481, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.049560546875, |
|
"learning_rate": 1.3258005729601178e-05, |
|
"loss": 0.8206, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.0546875, |
|
"learning_rate": 1.3238077769792475e-05, |
|
"loss": 0.8263, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.049560546875, |
|
"learning_rate": 1.321813543419864e-05, |
|
"loss": 0.8544, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.061279296875, |
|
"learning_rate": 1.3198178811355762e-05, |
|
"loss": 0.8752, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.054931640625, |
|
"learning_rate": 1.3178207989863356e-05, |
|
"loss": 0.8917, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.053466796875, |
|
"learning_rate": 1.3158223058383972e-05, |
|
"loss": 0.8589, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.053955078125, |
|
"learning_rate": 1.3138224105642803e-05, |
|
"loss": 0.7553, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.0625, |
|
"learning_rate": 1.31182112204273e-05, |
|
"loss": 0.8247, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.058349609375, |
|
"learning_rate": 1.3098184491586752e-05, |
|
"loss": 0.8437, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.0556640625, |
|
"learning_rate": 1.3078144008031924e-05, |
|
"loss": 0.8129, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.3058089858734637e-05, |
|
"loss": 0.7883, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.3038022132727388e-05, |
|
"loss": 0.889, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.052490234375, |
|
"learning_rate": 1.3017940919102943e-05, |
|
"loss": 0.8434, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.05859375, |
|
"learning_rate": 1.2997846307013955e-05, |
|
"loss": 0.8118, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.05322265625, |
|
"learning_rate": 1.2977738385672558e-05, |
|
"loss": 0.8348, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.05322265625, |
|
"learning_rate": 1.295761724434997e-05, |
|
"loss": 0.8419, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.057373046875, |
|
"learning_rate": 1.2937482972376104e-05, |
|
"loss": 0.8148, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 1.2917335659139166e-05, |
|
"loss": 0.8083, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.2897175394085266e-05, |
|
"loss": 0.7939, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.053955078125, |
|
"learning_rate": 1.2877002266718011e-05, |
|
"loss": 0.8044, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.2856816366598103e-05, |
|
"loss": 0.809, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 1.2836617783342968e-05, |
|
"loss": 0.88, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 1.2816406606626324e-05, |
|
"loss": 0.7649, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.2796182926177809e-05, |
|
"loss": 0.8551, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.05126953125, |
|
"learning_rate": 1.2775946831782565e-05, |
|
"loss": 0.8506, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 1.2755698413280853e-05, |
|
"loss": 0.8209, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.052490234375, |
|
"learning_rate": 1.2735437760567644e-05, |
|
"loss": 0.847, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.048828125, |
|
"learning_rate": 1.2715164963592228e-05, |
|
"loss": 0.7587, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.05224609375, |
|
"learning_rate": 1.2694880112357809e-05, |
|
"loss": 0.7927, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.0478515625, |
|
"learning_rate": 1.2674583296921109e-05, |
|
"loss": 0.7415, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 1.2654274607391959e-05, |
|
"loss": 0.7919, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.04931640625, |
|
"learning_rate": 1.2633954133932913e-05, |
|
"loss": 0.8203, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.0478515625, |
|
"learning_rate": 1.2613621966758838e-05, |
|
"loss": 0.8021, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 1.2593278196136525e-05, |
|
"loss": 0.7899, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.056884765625, |
|
"learning_rate": 1.257292291238427e-05, |
|
"loss": 0.8331, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.255255620587148e-05, |
|
"loss": 0.7661, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.056396484375, |
|
"learning_rate": 1.2532178167018283e-05, |
|
"loss": 0.8314, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 1.2511788886295115e-05, |
|
"loss": 0.7618, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.0556640625, |
|
"learning_rate": 1.2491388454222327e-05, |
|
"loss": 0.8003, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.2470976961369765e-05, |
|
"loss": 0.8458, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.0458984375, |
|
"learning_rate": 1.2450554498356388e-05, |
|
"loss": 0.7778, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 1.243012115584986e-05, |
|
"loss": 0.8396, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.048828125, |
|
"learning_rate": 1.2409677024566145e-05, |
|
"loss": 0.8113, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.2389222195269102e-05, |
|
"loss": 0.8426, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.052978515625, |
|
"learning_rate": 1.2368756758770084e-05, |
|
"loss": 0.8192, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.054443359375, |
|
"learning_rate": 1.2348280805927541e-05, |
|
"loss": 0.883, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.061279296875, |
|
"learning_rate": 1.2327794427646607e-05, |
|
"loss": 0.7733, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.06201171875, |
|
"learning_rate": 1.2307297714878706e-05, |
|
"loss": 0.8566, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 1.2286790758621132e-05, |
|
"loss": 0.86, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.05615234375, |
|
"learning_rate": 1.2266273649916669e-05, |
|
"loss": 0.8344, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.07373046875, |
|
"learning_rate": 1.2245746479853168e-05, |
|
"loss": 0.9192, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 1.2225209339563144e-05, |
|
"loss": 0.7797, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.2204662320223385e-05, |
|
"loss": 0.7331, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.046875, |
|
"learning_rate": 1.2184105513054539e-05, |
|
"loss": 0.8126, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.05126953125, |
|
"learning_rate": 1.2163539009320691e-05, |
|
"loss": 0.8052, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.2142962900328994e-05, |
|
"loss": 0.7866, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.052001953125, |
|
"learning_rate": 1.2122377277429231e-05, |
|
"loss": 0.8124, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.052734375, |
|
"learning_rate": 1.2101782232013436e-05, |
|
"loss": 0.8899, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.053955078125, |
|
"learning_rate": 1.208117785551547e-05, |
|
"loss": 0.7966, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.0546875, |
|
"learning_rate": 1.2060564239410613e-05, |
|
"loss": 0.8062, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.0634765625, |
|
"learning_rate": 1.2039941475215169e-05, |
|
"loss": 0.8438, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.05078125, |
|
"learning_rate": 1.2019309654486065e-05, |
|
"loss": 0.8648, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.0546875, |
|
"learning_rate": 1.1998668868820422e-05, |
|
"loss": 0.8274, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.05712890625, |
|
"learning_rate": 1.1978019209855174e-05, |
|
"loss": 0.8074, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.052490234375, |
|
"learning_rate": 1.195736076926664e-05, |
|
"loss": 0.8436, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.057861328125, |
|
"learning_rate": 1.1936693638770127e-05, |
|
"loss": 0.7833, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.054443359375, |
|
"learning_rate": 1.1916017910119525e-05, |
|
"loss": 0.8146, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.05419921875, |
|
"learning_rate": 1.1895333675106897e-05, |
|
"loss": 0.848, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 1.1874641025562065e-05, |
|
"loss": 0.8167, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.05517578125, |
|
"learning_rate": 1.185394005335222e-05, |
|
"loss": 0.8499, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.0556640625, |
|
"learning_rate": 1.1833230850381488e-05, |
|
"loss": 0.7853, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.055908203125, |
|
"learning_rate": 1.1812513508590541e-05, |
|
"loss": 0.8321, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.05712890625, |
|
"learning_rate": 1.1791788119956191e-05, |
|
"loss": 0.7874, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.055419921875, |
|
"learning_rate": 1.1771054776490968e-05, |
|
"loss": 0.8356, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.053466796875, |
|
"learning_rate": 1.1750313570242721e-05, |
|
"loss": 0.8236, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.06103515625, |
|
"learning_rate": 1.1729564593294203e-05, |
|
"loss": 0.8301, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.048583984375, |
|
"learning_rate": 1.170880793776267e-05, |
|
"loss": 0.7926, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.055908203125, |
|
"learning_rate": 1.168804369579947e-05, |
|
"loss": 0.7654, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.1667271959589623e-05, |
|
"loss": 0.7618, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.05126953125, |
|
"learning_rate": 1.1646492821351428e-05, |
|
"loss": 0.8027, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 1.1625706373336046e-05, |
|
"loss": 0.8891, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 1.1604912707827083e-05, |
|
"loss": 0.85, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.05419921875, |
|
"learning_rate": 1.1584111917140197e-05, |
|
"loss": 0.8244, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 1.1563304093622674e-05, |
|
"loss": 0.7958, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.05126953125, |
|
"learning_rate": 1.1542489329653024e-05, |
|
"loss": 0.8329, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.055908203125, |
|
"learning_rate": 1.1521667717640572e-05, |
|
"loss": 0.7596, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.05712890625, |
|
"learning_rate": 1.1500839350025039e-05, |
|
"loss": 0.9115, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.0556640625, |
|
"learning_rate": 1.1480004319276145e-05, |
|
"loss": 0.8006, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.05615234375, |
|
"learning_rate": 1.1459162717893193e-05, |
|
"loss": 0.7918, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.054443359375, |
|
"learning_rate": 1.143831463840465e-05, |
|
"loss": 0.7406, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.05322265625, |
|
"learning_rate": 1.1417460173367748e-05, |
|
"loss": 0.7735, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 1.1396599415368062e-05, |
|
"loss": 0.7926, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.05126953125, |
|
"learning_rate": 1.1375732457019118e-05, |
|
"loss": 0.8132, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.05224609375, |
|
"learning_rate": 1.1354859390961958e-05, |
|
"loss": 0.7791, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.052001953125, |
|
"learning_rate": 1.1333980309864743e-05, |
|
"loss": 0.8277, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.055908203125, |
|
"learning_rate": 1.1313095306422336e-05, |
|
"loss": 0.8324, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 1.1292204473355897e-05, |
|
"loss": 0.8637, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 1.1271307903412469e-05, |
|
"loss": 0.8237, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.054931640625, |
|
"learning_rate": 1.1250405689364561e-05, |
|
"loss": 0.8395, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.1229497924009731e-05, |
|
"loss": 0.7743, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 1.1208584700170203e-05, |
|
"loss": 0.7385, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.058837890625, |
|
"learning_rate": 1.1187666110692417e-05, |
|
"loss": 0.8611, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.06103515625, |
|
"learning_rate": 1.116674224844664e-05, |
|
"loss": 0.7648, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.057373046875, |
|
"learning_rate": 1.1145813206326548e-05, |
|
"loss": 0.8365, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.054931640625, |
|
"learning_rate": 1.1124879077248815e-05, |
|
"loss": 0.7706, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.0498046875, |
|
"learning_rate": 1.11039399541527e-05, |
|
"loss": 0.8028, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.0498046875, |
|
"learning_rate": 1.1082995929999626e-05, |
|
"loss": 0.7975, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.1062047097772783e-05, |
|
"loss": 0.8077, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.053955078125, |
|
"learning_rate": 1.1041093550476706e-05, |
|
"loss": 0.794, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.056640625, |
|
"learning_rate": 1.1020135381136858e-05, |
|
"loss": 0.825, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.05224609375, |
|
"learning_rate": 1.0999172682799227e-05, |
|
"loss": 0.8103, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.05322265625, |
|
"learning_rate": 1.0978205548529902e-05, |
|
"loss": 0.838, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.05224609375, |
|
"learning_rate": 1.0957234071414675e-05, |
|
"loss": 0.8659, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.052978515625, |
|
"learning_rate": 1.0936258344558613e-05, |
|
"loss": 0.858, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.0576171875, |
|
"learning_rate": 1.091527846108565e-05, |
|
"loss": 0.7929, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.0894294514138169e-05, |
|
"loss": 0.7513, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 1.0873306596876602e-05, |
|
"loss": 0.8347, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.054443359375, |
|
"learning_rate": 1.0852314802479009e-05, |
|
"loss": 0.8225, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 1.0831319224140653e-05, |
|
"loss": 0.8331, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.055908203125, |
|
"learning_rate": 1.08103199550736e-05, |
|
"loss": 0.8195, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.057373046875, |
|
"learning_rate": 1.0789317088506307e-05, |
|
"loss": 0.7831, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.052978515625, |
|
"learning_rate": 1.0768310717683192e-05, |
|
"loss": 0.8176, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.04638671875, |
|
"learning_rate": 1.0747300935864245e-05, |
|
"loss": 0.7595, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.04931640625, |
|
"learning_rate": 1.0726287836324583e-05, |
|
"loss": 0.8544, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.0705271512354068e-05, |
|
"loss": 0.88, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.053955078125, |
|
"learning_rate": 1.0684252057256861e-05, |
|
"loss": 0.8787, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.06201171875, |
|
"learning_rate": 1.066322956435104e-05, |
|
"loss": 0.8907, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 1.0642204126968159e-05, |
|
"loss": 0.7765, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.054443359375, |
|
"learning_rate": 1.062117583845285e-05, |
|
"loss": 0.7968, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.06001447921624e-05, |
|
"loss": 0.8493, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.07421875, |
|
"learning_rate": 1.0579111081466333e-05, |
|
"loss": 0.789, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 1.0558074799746019e-05, |
|
"loss": 0.8526, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.052490234375, |
|
"learning_rate": 1.0537036040394226e-05, |
|
"loss": 0.8511, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 1.0515994896814731e-05, |
|
"loss": 0.8339, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.05419921875, |
|
"learning_rate": 1.0494951462421893e-05, |
|
"loss": 0.7972, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.0473905830640239e-05, |
|
"loss": 0.8114, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 1.0452858094904053e-05, |
|
"loss": 0.7742, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.05224609375, |
|
"learning_rate": 1.0431808348656961e-05, |
|
"loss": 0.8655, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.05224609375, |
|
"learning_rate": 1.0410756685351517e-05, |
|
"loss": 0.8543, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.052001953125, |
|
"learning_rate": 1.0389703198448784e-05, |
|
"loss": 0.8646, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.0546875, |
|
"learning_rate": 1.0368647981417917e-05, |
|
"loss": 0.8008, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.053466796875, |
|
"learning_rate": 1.0347591127735754e-05, |
|
"loss": 0.839, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.05517578125, |
|
"learning_rate": 1.0326532730886405e-05, |
|
"loss": 0.749, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.052978515625, |
|
"learning_rate": 1.0305472884360825e-05, |
|
"loss": 0.778, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.0546875, |
|
"learning_rate": 1.0284411681656408e-05, |
|
"loss": 0.7992, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.0263349216276564e-05, |
|
"loss": 0.8138, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.056396484375, |
|
"learning_rate": 1.0242285581730313e-05, |
|
"loss": 0.8241, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.047119140625, |
|
"learning_rate": 1.022122087153187e-05, |
|
"loss": 0.7916, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 1.0200155179200214e-05, |
|
"loss": 0.8253, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.05517578125, |
|
"learning_rate": 1.0179088598258697e-05, |
|
"loss": 0.8981, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.053466796875, |
|
"learning_rate": 1.0158021222234602e-05, |
|
"loss": 0.8234, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.053955078125, |
|
"learning_rate": 1.0136953144658753e-05, |
|
"loss": 0.8459, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.04931640625, |
|
"learning_rate": 1.0115884459065088e-05, |
|
"loss": 0.8396, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.055419921875, |
|
"learning_rate": 1.009481525899024e-05, |
|
"loss": 0.8258, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 1.0073745637973125e-05, |
|
"loss": 0.7542, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 1.0052675689554534e-05, |
|
"loss": 0.7882, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.052978515625, |
|
"learning_rate": 1.0031605507276705e-05, |
|
"loss": 0.8435, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 1.0010535184682921e-05, |
|
"loss": 0.7813, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.0498046875, |
|
"learning_rate": 9.98946481531708e-06, |
|
"loss": 0.8568, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.053955078125, |
|
"learning_rate": 9.968394492723298e-06, |
|
"loss": 0.7625, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 9.947324310445467e-06, |
|
"loss": 0.8384, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.052490234375, |
|
"learning_rate": 9.926254362026875e-06, |
|
"loss": 0.8727, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.057373046875, |
|
"learning_rate": 9.905184741009765e-06, |
|
"loss": 0.8494, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.06591796875, |
|
"learning_rate": 9.884115540934915e-06, |
|
"loss": 0.8167, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.0576171875, |
|
"learning_rate": 9.863046855341247e-06, |
|
"loss": 0.7945, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.05419921875, |
|
"learning_rate": 9.841978777765401e-06, |
|
"loss": 0.8663, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.0498046875, |
|
"learning_rate": 9.820911401741306e-06, |
|
"loss": 0.8504, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.055419921875, |
|
"learning_rate": 9.79984482079979e-06, |
|
"loss": 0.9436, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.05322265625, |
|
"learning_rate": 9.778779128468133e-06, |
|
"loss": 0.7959, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.055908203125, |
|
"learning_rate": 9.757714418269687e-06, |
|
"loss": 0.8337, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.053466796875, |
|
"learning_rate": 9.73665078372344e-06, |
|
"loss": 0.8399, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.053955078125, |
|
"learning_rate": 9.715588318343594e-06, |
|
"loss": 0.832, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.055908203125, |
|
"learning_rate": 9.694527115639175e-06, |
|
"loss": 0.8985, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 9.673467269113599e-06, |
|
"loss": 0.7697, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.04931640625, |
|
"learning_rate": 9.652408872264249e-06, |
|
"loss": 0.8257, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.055908203125, |
|
"learning_rate": 9.631352018582088e-06, |
|
"loss": 0.8525, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 9.61029680155122e-06, |
|
"loss": 0.758, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 9.589243314648483e-06, |
|
"loss": 0.9023, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 9.568191651343042e-06, |
|
"loss": 0.7768, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 9.54714190509595e-06, |
|
"loss": 0.8373, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.059814453125, |
|
"learning_rate": 9.526094169359766e-06, |
|
"loss": 0.8108, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.052490234375, |
|
"learning_rate": 9.50504853757811e-06, |
|
"loss": 0.8059, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.05908203125, |
|
"learning_rate": 9.48400510318527e-06, |
|
"loss": 0.8751, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.05029296875, |
|
"learning_rate": 9.462963959605777e-06, |
|
"loss": 0.8535, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 9.441925200253985e-06, |
|
"loss": 0.8148, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.05126953125, |
|
"learning_rate": 9.420888918533669e-06, |
|
"loss": 0.8368, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.048095703125, |
|
"learning_rate": 9.399855207837606e-06, |
|
"loss": 0.9126, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 9.378824161547152e-06, |
|
"loss": 0.8548, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.055419921875, |
|
"learning_rate": 9.357795873031841e-06, |
|
"loss": 0.9106, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.058349609375, |
|
"learning_rate": 9.336770435648963e-06, |
|
"loss": 0.8111, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.056640625, |
|
"learning_rate": 9.315747942743142e-06, |
|
"loss": 0.8079, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.05126953125, |
|
"learning_rate": 9.294728487645934e-06, |
|
"loss": 0.7641, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.05908203125, |
|
"learning_rate": 9.273712163675419e-06, |
|
"loss": 0.7871, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.05615234375, |
|
"learning_rate": 9.252699064135759e-06, |
|
"loss": 0.8296, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.05078125, |
|
"learning_rate": 9.23168928231681e-06, |
|
"loss": 0.8139, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.053955078125, |
|
"learning_rate": 9.210682911493697e-06, |
|
"loss": 0.8108, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.0546875, |
|
"learning_rate": 9.189680044926402e-06, |
|
"loss": 0.843, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.061279296875, |
|
"learning_rate": 9.168680775859352e-06, |
|
"loss": 0.8102, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.0576171875, |
|
"learning_rate": 9.147685197520995e-06, |
|
"loss": 0.8662, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.05126953125, |
|
"learning_rate": 9.126693403123398e-06, |
|
"loss": 0.9419, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 9.105705485861834e-06, |
|
"loss": 0.8292, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.0546875, |
|
"learning_rate": 9.084721538914354e-06, |
|
"loss": 0.8407, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.054443359375, |
|
"learning_rate": 9.06374165544139e-06, |
|
"loss": 0.818, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.052490234375, |
|
"learning_rate": 9.042765928585327e-06, |
|
"loss": 0.8263, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.05810546875, |
|
"learning_rate": 9.0217944514701e-06, |
|
"loss": 0.8929, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.048828125, |
|
"learning_rate": 9.000827317200778e-06, |
|
"loss": 0.8218, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.0595703125, |
|
"learning_rate": 8.979864618863144e-06, |
|
"loss": 0.9103, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 8.958906449523295e-06, |
|
"loss": 0.7595, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.0498046875, |
|
"learning_rate": 8.93795290222722e-06, |
|
"loss": 0.8868, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.05712890625, |
|
"learning_rate": 8.917004070000377e-06, |
|
"loss": 0.8137, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.056640625, |
|
"learning_rate": 8.896060045847305e-06, |
|
"loss": 0.7945, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.05810546875, |
|
"learning_rate": 8.875120922751186e-06, |
|
"loss": 0.8517, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.05419921875, |
|
"learning_rate": 8.854186793673454e-06, |
|
"loss": 0.8248, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.05224609375, |
|
"learning_rate": 8.833257751553365e-06, |
|
"loss": 0.7861, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.049072265625, |
|
"learning_rate": 8.812333889307586e-06, |
|
"loss": 0.8321, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.053466796875, |
|
"learning_rate": 8.791415299829798e-06, |
|
"loss": 0.8041, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 8.77050207599027e-06, |
|
"loss": 0.8564, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.059326171875, |
|
"learning_rate": 8.749594310635442e-06, |
|
"loss": 0.811, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.052001953125, |
|
"learning_rate": 8.728692096587536e-06, |
|
"loss": 0.8223, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.056884765625, |
|
"learning_rate": 8.707795526644107e-06, |
|
"loss": 0.8617, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.05078125, |
|
"learning_rate": 8.686904693577668e-06, |
|
"loss": 0.778, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.05322265625, |
|
"learning_rate": 8.666019690135264e-06, |
|
"loss": 0.7807, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 8.645140609038045e-06, |
|
"loss": 0.8375, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.050048828125, |
|
"learning_rate": 8.624267542980882e-06, |
|
"loss": 0.8494, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.05078125, |
|
"learning_rate": 8.60340058463194e-06, |
|
"loss": 0.7645, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.055419921875, |
|
"learning_rate": 8.582539826632253e-06, |
|
"loss": 0.8531, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.053466796875, |
|
"learning_rate": 8.561685361595353e-06, |
|
"loss": 0.7901, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.04931640625, |
|
"learning_rate": 8.540837282106809e-06, |
|
"loss": 0.7988, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.05322265625, |
|
"learning_rate": 8.519995680723853e-06, |
|
"loss": 0.7846, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 8.499160649974964e-06, |
|
"loss": 0.7196, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.055908203125, |
|
"learning_rate": 8.47833228235943e-06, |
|
"loss": 0.8331, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.055419921875, |
|
"learning_rate": 8.457510670346976e-06, |
|
"loss": 0.7706, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.053466796875, |
|
"learning_rate": 8.43669590637733e-06, |
|
"loss": 0.819, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.051025390625, |
|
"learning_rate": 8.415888082859806e-06, |
|
"loss": 0.8358, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.05615234375, |
|
"learning_rate": 8.39508729217292e-06, |
|
"loss": 0.9253, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.05859375, |
|
"learning_rate": 8.374293626663958e-06, |
|
"loss": 0.9424, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.04833984375, |
|
"learning_rate": 8.353507178648572e-06, |
|
"loss": 0.7783, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.054931640625, |
|
"learning_rate": 8.33272804041038e-06, |
|
"loss": 0.838, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.056640625, |
|
"learning_rate": 8.311956304200532e-06, |
|
"loss": 0.8241, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.07421875, |
|
"learning_rate": 8.291192062237329e-06, |
|
"loss": 0.8346, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 8.2704354067058e-06, |
|
"loss": 0.752, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.052001953125, |
|
"learning_rate": 8.249686429757282e-06, |
|
"loss": 0.8214, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.05810546875, |
|
"learning_rate": 8.228945223509037e-06, |
|
"loss": 0.7654, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.058349609375, |
|
"learning_rate": 8.208211880043812e-06, |
|
"loss": 0.8251, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.052978515625, |
|
"learning_rate": 8.187486491409462e-06, |
|
"loss": 0.8461, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.052734375, |
|
"learning_rate": 8.166769149618517e-06, |
|
"loss": 0.7852, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.05419921875, |
|
"learning_rate": 8.146059946647784e-06, |
|
"loss": 0.8457, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 8.125358974437933e-06, |
|
"loss": 0.868, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.053955078125, |
|
"learning_rate": 8.104666324893106e-06, |
|
"loss": 0.7821, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.052978515625, |
|
"learning_rate": 8.083982089880477e-06, |
|
"loss": 0.8308, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.055908203125, |
|
"learning_rate": 8.063306361229876e-06, |
|
"loss": 0.8241, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 8.042639230733364e-06, |
|
"loss": 0.8983, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.0556640625, |
|
"learning_rate": 8.021980790144828e-06, |
|
"loss": 0.8106, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.05078125, |
|
"learning_rate": 8.001331131179581e-06, |
|
"loss": 0.7587, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.061767578125, |
|
"learning_rate": 7.98069034551394e-06, |
|
"loss": 0.8053, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.057373046875, |
|
"learning_rate": 7.960058524784833e-06, |
|
"loss": 0.7977, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.054443359375, |
|
"learning_rate": 7.939435760589392e-06, |
|
"loss": 0.8021, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.052001953125, |
|
"learning_rate": 7.918822144484532e-06, |
|
"loss": 0.832, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.061279296875, |
|
"learning_rate": 7.898217767986562e-06, |
|
"loss": 0.8482, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.054443359375, |
|
"learning_rate": 7.877622722570772e-06, |
|
"loss": 0.8368, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.056396484375, |
|
"learning_rate": 7.857037099671008e-06, |
|
"loss": 0.8214, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.056396484375, |
|
"learning_rate": 7.836460990679312e-06, |
|
"loss": 0.8223, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 7.815894486945466e-06, |
|
"loss": 0.793, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.050537109375, |
|
"learning_rate": 7.795337679776613e-06, |
|
"loss": 0.8183, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.048828125, |
|
"learning_rate": 7.774790660436857e-06, |
|
"loss": 0.774, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.052734375, |
|
"learning_rate": 7.754253520146835e-06, |
|
"loss": 0.887, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.0498046875, |
|
"learning_rate": 7.733726350083331e-06, |
|
"loss": 0.7979, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.05615234375, |
|
"learning_rate": 7.713209241378871e-06, |
|
"loss": 0.8028, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.0546875, |
|
"learning_rate": 7.692702285121299e-06, |
|
"loss": 0.7976, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.0498046875, |
|
"learning_rate": 7.672205572353394e-06, |
|
"loss": 0.747, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.08251953125, |
|
"learning_rate": 7.65171919407246e-06, |
|
"loss": 0.7857, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 7.631243241229916e-06, |
|
"loss": 0.7942, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.052001953125, |
|
"learning_rate": 7.610777804730903e-06, |
|
"loss": 0.8072, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 7.590322975433857e-06, |
|
"loss": 0.7873, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.0537109375, |
|
"learning_rate": 7.56987884415014e-06, |
|
"loss": 0.8778, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.051513671875, |
|
"learning_rate": 7.549445501643615e-06, |
|
"loss": 0.7803, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 7.5290230386302384e-06, |
|
"loss": 0.8263, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.05126953125, |
|
"learning_rate": 7.508611545777679e-06, |
|
"loss": 0.8951, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.0517578125, |
|
"learning_rate": 7.488211113704886e-06, |
|
"loss": 0.8128, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.054931640625, |
|
"learning_rate": 7.46782183298172e-06, |
|
"loss": 0.833, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.056640625, |
|
"learning_rate": 7.447443794128525e-06, |
|
"loss": 0.783, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.052734375, |
|
"learning_rate": 7.427077087615735e-06, |
|
"loss": 0.8378, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.056884765625, |
|
"learning_rate": 7.406721803863475e-06, |
|
"loss": 0.8127, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.06640625, |
|
"learning_rate": 7.386378033241164e-06, |
|
"loss": 0.7863, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.054931640625, |
|
"learning_rate": 7.3660458660670905e-06, |
|
"loss": 0.795, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.060791015625, |
|
"learning_rate": 7.345725392608047e-06, |
|
"loss": 0.9132, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.052001953125, |
|
"learning_rate": 7.3254167030788955e-06, |
|
"loss": 0.8416, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.055419921875, |
|
"learning_rate": 7.305119887642191e-06, |
|
"loss": 0.8038, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.053466796875, |
|
"learning_rate": 7.284835036407776e-06, |
|
"loss": 0.8545, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.049560546875, |
|
"learning_rate": 7.26456223943236e-06, |
|
"loss": 0.8192, |
|
"step": 924 |
|
} |
|
],
"logging_steps": 1,
"max_steps": 1538,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 462,
"total_flos": 2.5348887258259784e+18,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}