{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1350,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0022222222222222222, "grad_norm": 75361.5234375, "learning_rate": 0.0, "loss": 0.6475, "step": 1 },
    { "epoch": 0.0044444444444444444, "grad_norm": 979666.625, "learning_rate": 3.703703703703704e-07, "loss": 0.6324, "step": 2 },
    { "epoch": 0.006666666666666667, "grad_norm": 211083.375, "learning_rate": 7.407407407407408e-07, "loss": 0.6486, "step": 3 },
    { "epoch": 0.008888888888888889, "grad_norm": 398850.3125, "learning_rate": 1.1111111111111112e-06, "loss": 0.811, "step": 4 },
    { "epoch": 0.011111111111111112, "grad_norm": 660421.375, "learning_rate": 1.4814814814814817e-06, "loss": 0.6477, "step": 5 },
    { "epoch": 0.013333333333333334, "grad_norm": 1308591.75, "learning_rate": 1.8518518518518519e-06, "loss": 0.6637, "step": 6 },
    { "epoch": 0.015555555555555555, "grad_norm": 870002.25, "learning_rate": 2.2222222222222225e-06, "loss": 0.7728, "step": 7 },
    { "epoch": 0.017777777777777778, "grad_norm": 491811.40625, "learning_rate": 2.5925925925925925e-06, "loss": 0.7462, "step": 8 },
    { "epoch": 0.02, "grad_norm": 675543.8125, "learning_rate": 2.9629629629629633e-06, "loss": 0.7465, "step": 9 },
    { "epoch": 0.022222222222222223, "grad_norm": 166720.375, "learning_rate": 3.3333333333333333e-06, "loss": 0.799, "step": 10 },
    { "epoch": 0.024444444444444446, "grad_norm": 930302.5, "learning_rate": 3.7037037037037037e-06, "loss": 0.8028, "step": 11 },
    { "epoch": 0.02666666666666667, "grad_norm": 1149363.875, "learning_rate": 4.074074074074075e-06, "loss": 0.7713, "step": 12 },
    { "epoch": 0.028888888888888888, "grad_norm": 470885.25, "learning_rate": 4.444444444444445e-06, "loss": 0.6513, "step": 13 },
    { "epoch": 0.03111111111111111, "grad_norm": 520825.34375, "learning_rate": 4.814814814814815e-06, "loss": 0.4761, "step": 14 },
    { "epoch": 0.03333333333333333, "grad_norm": 511413.9375, "learning_rate": 5.185185185185185e-06, "loss": 0.6312, "step": 15 },
    { "epoch": 0.035555555555555556, "grad_norm": 617626.375, "learning_rate": 5.555555555555556e-06, "loss": 0.7008, "step": 16 },
    { "epoch": 0.03777777777777778, "grad_norm": 175768.0625, "learning_rate": 5.925925925925927e-06, "loss": 0.9021, "step": 17 },
    { "epoch": 0.04, "grad_norm": 541401.5625, "learning_rate": 6.296296296296296e-06, "loss": 0.681, "step": 18 },
    { "epoch": 0.042222222222222223, "grad_norm": 214184.921875, "learning_rate": 6.666666666666667e-06, "loss": 0.8062, "step": 19 },
    { "epoch": 0.044444444444444446, "grad_norm": 569162.3125, "learning_rate": 7.0370370370370375e-06, "loss": 0.7527, "step": 20 },
    { "epoch": 0.04666666666666667, "grad_norm": 640316.3125, "learning_rate": 7.4074074074074075e-06, "loss": 0.6391, "step": 21 },
    { "epoch": 0.04888888888888889, "grad_norm": 229886.5625, "learning_rate": 7.777777777777777e-06, "loss": 0.6762, "step": 22 },
    { "epoch": 0.051111111111111114, "grad_norm": 2014340.25, "learning_rate": 8.14814814814815e-06, "loss": 0.5573, "step": 23 },
    { "epoch": 0.05333333333333334, "grad_norm": 680286.5625, "learning_rate": 8.518518518518519e-06, "loss": 0.6578, "step": 24 },
    { "epoch": 0.05555555555555555, "grad_norm": 569364.5, "learning_rate": 8.88888888888889e-06, "loss": 0.777, "step": 25 },
    { "epoch": 0.057777777777777775, "grad_norm": 2732369.25, "learning_rate": 9.259259259259259e-06, "loss": 0.7702, "step": 26 },
    { "epoch": 0.06, "grad_norm": 519576.375, "learning_rate": 9.62962962962963e-06, "loss": 0.7192, "step": 27 },
    { "epoch": 0.06222222222222222, "grad_norm": 206192.546875, "learning_rate": 1e-05, "loss": 0.7174, "step": 28 },
    { "epoch": 0.06444444444444444, "grad_norm": 221324.625, "learning_rate": 1.037037037037037e-05, "loss": 0.6039, "step": 29 },
    { "epoch": 0.06666666666666667, "grad_norm": 1108986.0, "learning_rate": 1.074074074074074e-05, "loss": 0.763, "step": 30 },
    { "epoch": 0.06888888888888889, "grad_norm": 639494.5, "learning_rate": 1.1111111111111112e-05, "loss": 0.7044, "step": 31 },
    { "epoch": 0.07111111111111111, "grad_norm": 627301.375, "learning_rate": 1.1481481481481482e-05, "loss": 0.7324, "step": 32 },
    { "epoch": 0.07333333333333333, "grad_norm": 714222.5, "learning_rate": 1.1851851851851853e-05, "loss": 0.6594, "step": 33 },
    { "epoch": 0.07555555555555556, "grad_norm": 639339.6875, "learning_rate": 1.2222222222222222e-05, "loss": 0.6238, "step": 34 },
    { "epoch": 0.07777777777777778, "grad_norm": 231330.515625, "learning_rate": 1.2592592592592592e-05, "loss": 0.6839, "step": 35 },
    { "epoch": 0.08, "grad_norm": 511528.15625, "learning_rate": 1.2962962962962962e-05, "loss": 0.5042, "step": 36 },
    { "epoch": 0.08222222222222222, "grad_norm": 312614.03125, "learning_rate": 1.3333333333333333e-05, "loss": 0.7532, "step": 37 },
    { "epoch": 0.08444444444444445, "grad_norm": 404826.78125, "learning_rate": 1.3703703703703704e-05, "loss": 0.6002, "step": 38 },
    { "epoch": 0.08666666666666667, "grad_norm": 328368.75, "learning_rate": 1.4074074074074075e-05, "loss": 0.7407, "step": 39 },
    { "epoch": 0.08888888888888889, "grad_norm": 212831.46875, "learning_rate": 1.4444444444444444e-05, "loss": 0.591, "step": 40 },
    { "epoch": 0.09111111111111111, "grad_norm": 945027.1875, "learning_rate": 1.4814814814814815e-05, "loss": 0.6917, "step": 41 },
    { "epoch": 0.09333333333333334, "grad_norm": 1313241.375, "learning_rate": 1.5185185185185186e-05, "loss": 0.807, "step": 42 },
    { "epoch": 0.09555555555555556, "grad_norm": 635023.75, "learning_rate": 1.5555555555555555e-05, "loss": 0.8375, "step": 43 },
    { "epoch": 0.09777777777777778, "grad_norm": 2153337.0, "learning_rate": 1.5925925925925926e-05, "loss": 0.8316, "step": 44 },
    { "epoch": 0.1, "grad_norm": 376420.84375, "learning_rate": 1.62962962962963e-05, "loss": 0.7526, "step": 45 },
    { "epoch": 0.10222222222222223, "grad_norm": 791087.625, "learning_rate": 1.6666666666666667e-05, "loss": 0.6498, "step": 46 },
    { "epoch": 0.10444444444444445, "grad_norm": 656814.375, "learning_rate": 1.7037037037037038e-05, "loss": 0.5489, "step": 47 },
    { "epoch": 0.10666666666666667, "grad_norm": 1977482.875, "learning_rate": 1.740740740740741e-05, "loss": 0.6734, "step": 48 },
    { "epoch": 0.10888888888888888, "grad_norm": 744988.0, "learning_rate": 1.777777777777778e-05, "loss": 0.6745, "step": 49 },
    { "epoch": 0.1111111111111111, "grad_norm": 1026407.4375, "learning_rate": 1.814814814814815e-05, "loss": 0.7124, "step": 50 },
    { "epoch": 0.11333333333333333, "grad_norm": 2253132.25, "learning_rate": 1.8518518518518518e-05, "loss": 0.6016, "step": 51 },
    { "epoch": 0.11555555555555555, "grad_norm": 2241903.25, "learning_rate": 1.888888888888889e-05, "loss": 0.5899, "step": 52 },
    { "epoch": 0.11777777777777777, "grad_norm": 838888.8125, "learning_rate": 1.925925925925926e-05, "loss": 0.7287, "step": 53 },
    { "epoch": 0.12, "grad_norm": 228881.84375, "learning_rate": 1.962962962962963e-05, "loss": 0.5912, "step": 54 },
    { "epoch": 0.12222222222222222, "grad_norm": 1143221.875, "learning_rate": 2e-05, "loss": 0.7826, "step": 55 },
    { "epoch": 0.12444444444444444, "grad_norm": 851040.6875, "learning_rate": 2.037037037037037e-05, "loss": 0.514, "step": 56 },
    { "epoch": 0.12666666666666668, "grad_norm": 638632.0, "learning_rate": 2.074074074074074e-05, "loss": 0.5897, "step": 57 },
    { "epoch": 0.1288888888888889, "grad_norm": 688196.3125, "learning_rate": 2.111111111111111e-05, "loss": 0.634, "step": 58 },
    { "epoch": 0.13111111111111112, "grad_norm": 352046.25, "learning_rate": 2.148148148148148e-05, "loss": 0.5804, "step": 59 },
    { "epoch": 0.13333333333333333, "grad_norm": 272880.15625, "learning_rate": 2.1851851851851852e-05, "loss": 0.7051, "step": 60 },
    { "epoch": 0.13555555555555557, "grad_norm": 686219.4375, "learning_rate": 2.2222222222222223e-05, "loss": 0.7302, "step": 61 },
    { "epoch": 0.13777777777777778, "grad_norm": 589322.25, "learning_rate": 2.2592592592592594e-05, "loss": 0.6278, "step": 62 },
    { "epoch": 0.14, "grad_norm": 1640305.5, "learning_rate": 2.2962962962962965e-05, "loss": 0.7666, "step": 63 },
    { "epoch": 0.14222222222222222, "grad_norm": 563189.0625, "learning_rate": 2.3333333333333336e-05, "loss": 0.6705, "step": 64 },
    { "epoch": 0.14444444444444443, "grad_norm": 1177888.375, "learning_rate": 2.3703703703703707e-05, "loss": 0.6395, "step": 65 },
    { "epoch": 0.14666666666666667, "grad_norm": 355971.625, "learning_rate": 2.4074074074074074e-05, "loss": 0.6872, "step": 66 },
    { "epoch": 0.14888888888888888, "grad_norm": 1110798.625, "learning_rate": 2.4444444444444445e-05, "loss": 0.8045, "step": 67 },
    { "epoch": 0.1511111111111111, "grad_norm": 234297.390625, "learning_rate": 2.4814814814814816e-05, "loss": 0.6402, "step": 68 },
    { "epoch": 0.15333333333333332, "grad_norm": 944397.0, "learning_rate": 2.5185185185185183e-05, "loss": 0.7907, "step": 69 },
    { "epoch": 0.15555555555555556, "grad_norm": 595624.0625, "learning_rate": 2.5555555555555554e-05, "loss": 0.7191, "step": 70 },
    { "epoch": 0.15777777777777777, "grad_norm": 785496.6875, "learning_rate": 2.5925925925925925e-05, "loss": 0.6487, "step": 71 },
    { "epoch": 0.16, "grad_norm": 731935.3125, "learning_rate": 2.6296296296296296e-05, "loss": 0.6053, "step": 72 },
    { "epoch": 0.1622222222222222, "grad_norm": 369573.5, "learning_rate": 2.6666666666666667e-05, "loss": 0.7378, "step": 73 },
    { "epoch": 0.16444444444444445, "grad_norm": 389220.34375, "learning_rate": 2.7037037037037037e-05, "loss": 0.7437, "step": 74 },
    { "epoch": 0.16666666666666666, "grad_norm": 1437347.5, "learning_rate": 2.7407407407407408e-05, "loss": 0.5898, "step": 75 },
    { "epoch": 0.1688888888888889, "grad_norm": 282015.8125, "learning_rate": 2.777777777777778e-05, "loss": 0.7979, "step": 76 },
    { "epoch": 0.1711111111111111, "grad_norm": 3742327.5, "learning_rate": 2.814814814814815e-05, "loss": 0.6935, "step": 77 },
    { "epoch": 0.17333333333333334, "grad_norm": 634704.25, "learning_rate": 2.851851851851852e-05, "loss": 0.6088, "step": 78 },
    { "epoch": 0.17555555555555555, "grad_norm": 322738.6875, "learning_rate": 2.8888888888888888e-05, "loss": 0.7765, "step": 79 },
    { "epoch": 0.17777777777777778, "grad_norm": 460376.125, "learning_rate": 2.925925925925926e-05, "loss": 0.6523, "step": 80 },
    { "epoch": 0.18, "grad_norm": 337121.34375, "learning_rate": 2.962962962962963e-05, "loss": 0.6013, "step": 81 },
    { "epoch": 0.18222222222222223, "grad_norm": 380048.375, "learning_rate": 3e-05, "loss": 0.5171, "step": 82 },
    { "epoch": 0.18444444444444444, "grad_norm": 187856.171875, "learning_rate": 3.037037037037037e-05, "loss": 0.6407, "step": 83 },
    { "epoch": 0.18666666666666668, "grad_norm": 424345.5625, "learning_rate": 3.074074074074074e-05, "loss": 0.6888, "step": 84 },
    { "epoch": 0.18888888888888888, "grad_norm": 330755.9375, "learning_rate": 3.111111111111111e-05, "loss": 0.7823, "step": 85 },
    { "epoch": 0.19111111111111112, "grad_norm": 757690.4375, "learning_rate": 3.148148148148148e-05, "loss": 0.6836, "step": 86 },
    { "epoch": 0.19333333333333333, "grad_norm": 615780.3125, "learning_rate": 3.185185185185185e-05, "loss": 0.7781, "step": 87 },
    { "epoch": 0.19555555555555557, "grad_norm": 322161.71875, "learning_rate": 3.222222222222223e-05, "loss": 0.5926, "step": 88 },
    { "epoch": 0.19777777777777777, "grad_norm": 522235.53125, "learning_rate": 3.25925925925926e-05, "loss": 0.6607, "step": 89 },
    { "epoch": 0.2, "grad_norm": 1229829.25, "learning_rate": 3.2962962962962964e-05, "loss": 0.5887, "step": 90 },
    { "epoch": 0.20222222222222222, "grad_norm": 882748.0625, "learning_rate": 3.3333333333333335e-05, "loss": 0.6813, "step": 91 },
    { "epoch": 0.20444444444444446, "grad_norm": 889859.8125, "learning_rate": 3.3703703703703706e-05, "loss": 0.7057, "step": 92 },
    { "epoch": 0.20666666666666667, "grad_norm": 579578.375, "learning_rate": 3.4074074074074077e-05, "loss": 0.6658, "step": 93 },
    { "epoch": 0.2088888888888889, "grad_norm": 2560019.25, "learning_rate": 3.444444444444445e-05, "loss": 0.6808, "step": 94 },
    { "epoch": 0.2111111111111111, "grad_norm": 992348.1875, "learning_rate": 3.481481481481482e-05, "loss": 0.7378, "step": 95 },
    { "epoch": 0.21333333333333335, "grad_norm": 456105.78125, "learning_rate": 3.518518518518519e-05, "loss": 0.6837, "step": 96 },
    { "epoch": 0.21555555555555556, "grad_norm": 911418.9375, "learning_rate": 3.555555555555556e-05, "loss": 0.5922, "step": 97 },
    { "epoch": 0.21777777777777776, "grad_norm": 516042.90625, "learning_rate": 3.592592592592593e-05, "loss": 0.5892, "step": 98 },
    { "epoch": 0.22, "grad_norm": 300478.25, "learning_rate": 3.62962962962963e-05, "loss": 0.5852, "step": 99 },
    { "epoch": 0.2222222222222222, "grad_norm": 216423.0, "learning_rate": 3.6666666666666666e-05, "loss": 0.6518, "step": 100 },
    { "epoch": 0.22444444444444445, "grad_norm": 606593.8125, "learning_rate": 3.7037037037037037e-05, "loss": 0.7084, "step": 101 },
    { "epoch": 0.22666666666666666, "grad_norm": 325548.90625, "learning_rate": 3.740740740740741e-05, "loss": 0.5128, "step": 102 },
    { "epoch": 0.2288888888888889, "grad_norm": 616763.25, "learning_rate": 3.777777777777778e-05, "loss": 0.6194, "step": 103 },
    { "epoch": 0.2311111111111111, "grad_norm": 1810430.5, "learning_rate": 3.814814814814815e-05, "loss": 0.7078, "step": 104 },
    { "epoch": 0.23333333333333334, "grad_norm": 708301.125, "learning_rate": 3.851851851851852e-05, "loss": 0.5812, "step": 105 },
    { "epoch": 0.23555555555555555, "grad_norm": 349738.53125, "learning_rate": 3.888888888888889e-05, "loss": 0.5519, "step": 106 },
    { "epoch": 0.23777777777777778, "grad_norm": 307563.15625, "learning_rate": 3.925925925925926e-05, "loss": 0.6777, "step": 107 },
    { "epoch": 0.24, "grad_norm": 1156388.125, "learning_rate": 3.962962962962963e-05, "loss": 0.6964, "step": 108 },
    { "epoch": 0.24222222222222223, "grad_norm": 1874161.625, "learning_rate": 4e-05, "loss": 0.6031, "step": 109 },
    { "epoch": 0.24444444444444444, "grad_norm": 648151.75, "learning_rate": 4.0370370370370374e-05, "loss": 0.5189, "step": 110 },
    { "epoch": 0.24666666666666667, "grad_norm": 484538.03125, "learning_rate": 4.074074074074074e-05, "loss": 0.5967, "step": 111 },
    { "epoch": 0.24888888888888888, "grad_norm": 549294.625, "learning_rate": 4.111111111111111e-05, "loss": 0.622, "step": 112 },
    { "epoch": 0.2511111111111111, "grad_norm": 422266.71875, "learning_rate": 4.148148148148148e-05, "loss": 0.5626, "step": 113 },
    { "epoch": 0.25333333333333335, "grad_norm": 4367868.0, "learning_rate": 4.185185185185185e-05, "loss": 0.7472, "step": 114 },
    { "epoch": 0.25555555555555554, "grad_norm": 692434.8125, "learning_rate": 4.222222222222222e-05, "loss": 0.5585, "step": 115 },
    { "epoch": 0.2577777777777778, "grad_norm": 945114.75, "learning_rate": 4.259259259259259e-05, "loss": 0.6109, "step": 116 },
    { "epoch": 0.26, "grad_norm": 689406.625, "learning_rate": 4.296296296296296e-05, "loss": 0.5677, "step": 117 },
    { "epoch": 0.26222222222222225, "grad_norm": 1730677.875, "learning_rate": 4.3333333333333334e-05, "loss": 0.6684, "step": 118 },
    { "epoch": 0.2644444444444444, "grad_norm": 813010.0625, "learning_rate": 4.3703703703703705e-05, "loss": 0.7408, "step": 119 },
    { "epoch": 0.26666666666666666, "grad_norm": 673770.125, "learning_rate": 4.4074074074074076e-05, "loss": 0.6679, "step": 120 },
    { "epoch": 0.2688888888888889, "grad_norm": 363243.59375, "learning_rate": 4.4444444444444447e-05, "loss": 0.5323, "step": 121 },
    { "epoch": 0.27111111111111114, "grad_norm": 975494.6875, "learning_rate": 4.481481481481482e-05, "loss": 0.7136, "step": 122 },
    { "epoch": 0.2733333333333333, "grad_norm": 645500.375, "learning_rate": 4.518518518518519e-05, "loss": 0.5937, "step": 123 },
    { "epoch": 0.27555555555555555, "grad_norm": 485883.9375, "learning_rate": 4.555555555555556e-05, "loss": 0.7137, "step": 124 },
    { "epoch": 0.2777777777777778, "grad_norm": 1330097.0, "learning_rate": 4.592592592592593e-05, "loss": 0.7694, "step": 125 },
    { "epoch": 0.28, "grad_norm": 1731506.125, "learning_rate": 4.62962962962963e-05, "loss": 0.798, "step": 126 },
    { "epoch": 0.2822222222222222, "grad_norm": 2576107.75, "learning_rate": 4.666666666666667e-05, "loss": 0.5908, "step": 127 },
    { "epoch": 0.28444444444444444, "grad_norm": 631525.375, "learning_rate": 4.703703703703704e-05, "loss": 0.4795, "step": 128 },
    { "epoch": 0.2866666666666667, "grad_norm": 437369.09375, "learning_rate": 4.740740740740741e-05, "loss": 0.621, "step": 129 },
    { "epoch": 0.28888888888888886, "grad_norm": 522823.84375, "learning_rate": 4.7777777777777784e-05, "loss": 0.5231, "step": 130 },
    { "epoch": 0.2911111111111111, "grad_norm": 239169.03125, "learning_rate": 4.814814814814815e-05, "loss": 0.533, "step": 131 },
    { "epoch": 0.29333333333333333, "grad_norm": 1025517.25, "learning_rate": 4.851851851851852e-05, "loss": 0.7916, "step": 132 },
    { "epoch": 0.29555555555555557, "grad_norm": 780890.125, "learning_rate": 4.888888888888889e-05, "loss": 0.6586, "step": 133 },
    { "epoch": 0.29777777777777775, "grad_norm": 239262.828125, "learning_rate": 4.925925925925926e-05, "loss": 0.5212, "step": 134 },
    { "epoch": 0.3, "grad_norm": 1036039.0, "learning_rate": 4.962962962962963e-05, "loss": 0.5101, "step": 135 },
    { "epoch": 0.3022222222222222, "grad_norm": 576536.625, "learning_rate": 5e-05, "loss": 0.673, "step": 136 },
    { "epoch": 0.30444444444444446, "grad_norm": 648396.125, "learning_rate": 4.995884773662552e-05, "loss": 0.6622, "step": 137 },
    { "epoch": 0.30666666666666664, "grad_norm": 727469.6875, "learning_rate": 4.991769547325103e-05, "loss": 0.6532, "step": 138 },
    { "epoch": 0.3088888888888889, "grad_norm": 534778.5625, "learning_rate": 4.987654320987655e-05, "loss": 0.5044, "step": 139 },
    { "epoch": 0.3111111111111111, "grad_norm": 436606.625, "learning_rate": 4.983539094650206e-05, "loss": 0.588, "step": 140 },
    { "epoch": 0.31333333333333335, "grad_norm": 484253.90625, "learning_rate": 4.9794238683127575e-05, "loss": 0.5805, "step": 141 },
    { "epoch": 0.31555555555555553, "grad_norm": 1102731.625, "learning_rate": 4.9753086419753084e-05, "loss": 0.7003, "step": 142 },
    { "epoch": 0.31777777777777777, "grad_norm": 1500681.75, "learning_rate": 4.971193415637861e-05, "loss": 0.6748, "step": 143 },
    { "epoch": 0.32, "grad_norm": 3508447.5, "learning_rate": 4.967078189300412e-05, "loss": 0.6002, "step": 144 },
    { "epoch": 0.32222222222222224, "grad_norm": 532125.9375, "learning_rate": 4.962962962962963e-05, "loss": 0.4983, "step": 145 },
    { "epoch": 0.3244444444444444, "grad_norm": 1056551.25, "learning_rate": 4.958847736625515e-05, "loss": 0.6097, "step": 146 },
    { "epoch": 0.32666666666666666, "grad_norm": 481759.28125, "learning_rate": 4.9547325102880656e-05, "loss": 0.5946, "step": 147 },
    { "epoch": 0.3288888888888889, "grad_norm": 626184.0625, "learning_rate": 4.950617283950618e-05, "loss": 0.5802, "step": 148 },
    { "epoch": 0.33111111111111113, "grad_norm": 673809.4375, "learning_rate": 4.946502057613169e-05, "loss": 0.6302, "step": 149 },
    { "epoch": 0.3333333333333333, "grad_norm": 1093257.5, "learning_rate": 4.9423868312757204e-05, "loss": 0.6589, "step": 150 },
    { "epoch": 0.33555555555555555, "grad_norm": 214708.546875, "learning_rate": 4.938271604938271e-05, "loss": 0.5819, "step": 151 },
    { "epoch": 0.3377777777777778, "grad_norm": 873922.3125, "learning_rate": 4.9341563786008236e-05, "loss": 0.5499, "step": 152 },
    { "epoch": 0.34, "grad_norm": 477493.90625, "learning_rate": 4.930041152263375e-05, "loss": 0.5954, "step": 153 },
    { "epoch": 0.3422222222222222, "grad_norm": 2321650.25, "learning_rate": 4.925925925925926e-05, "loss": 0.7243, "step": 154 },
    { "epoch": 0.34444444444444444, "grad_norm": 646336.1875, "learning_rate": 4.9218106995884777e-05, "loss": 0.5562, "step": 155 },
    { "epoch": 0.3466666666666667, "grad_norm": 664530.0, "learning_rate": 4.9176954732510286e-05, "loss": 0.6897, "step": 156 },
    { "epoch": 0.3488888888888889, "grad_norm": 559558.9375, "learning_rate": 4.913580246913581e-05, "loss": 0.7113, "step": 157 },
    { "epoch": 0.3511111111111111, "grad_norm": 533661.0625, "learning_rate": 4.909465020576132e-05, "loss": 0.5504, "step": 158 },
    { "epoch": 0.35333333333333333, "grad_norm": 1041493.0, "learning_rate": 4.905349794238683e-05, "loss": 0.7329, "step": 159 },
    { "epoch": 0.35555555555555557, "grad_norm": 1396587.375, "learning_rate": 4.901234567901235e-05, "loss": 0.6192, "step": 160 },
    { "epoch": 0.35777777777777775, "grad_norm": 1317377.625, "learning_rate": 4.8971193415637865e-05, "loss": 0.6089, "step": 161 },
    { "epoch": 0.36, "grad_norm": 1898401.25, "learning_rate": 4.893004115226338e-05, "loss": 0.5226, "step": 162 },
    { "epoch": 0.3622222222222222, "grad_norm": 927057.1875, "learning_rate": 4.888888888888889e-05, "loss": 0.5559, "step": 163 },
    { "epoch": 0.36444444444444446, "grad_norm": 699609.625, "learning_rate": 4.8847736625514406e-05, "loss": 0.7064, "step": 164 },
    { "epoch": 0.36666666666666664, "grad_norm": 1169845.25, "learning_rate": 4.8806584362139915e-05, "loss": 0.6133, "step": 165 },
    { "epoch": 0.3688888888888889, "grad_norm": 255957.734375, "learning_rate": 4.876543209876544e-05, "loss": 0.5894, "step": 166 },
    { "epoch": 0.3711111111111111, "grad_norm": 546818.125, "learning_rate": 4.872427983539095e-05, "loss": 0.6591, "step": 167 },
    { "epoch": 0.37333333333333335, "grad_norm": 439788.78125, "learning_rate": 4.868312757201646e-05, "loss": 0.6026, "step": 168 },
    { "epoch": 0.37555555555555553, "grad_norm": 1009843.0625, "learning_rate": 4.864197530864198e-05, "loss": 0.5856, "step": 169 },
    { "epoch": 0.37777777777777777, "grad_norm": 488187.84375, "learning_rate": 4.860082304526749e-05, "loss": 0.6526, "step": 170 },
    { "epoch": 0.38, "grad_norm": 1203392.0, "learning_rate": 4.855967078189301e-05, "loss": 0.5753, "step": 171 },
    { "epoch": 0.38222222222222224, "grad_norm": 331781.71875, "learning_rate": 4.851851851851852e-05, "loss": 0.644, "step": 172 },
    { "epoch": 0.3844444444444444, "grad_norm": 957421.0, "learning_rate": 4.8477366255144035e-05, "loss": 0.5646, "step": 173 },
    { "epoch": 0.38666666666666666, "grad_norm": 369945.78125, "learning_rate": 4.843621399176955e-05, "loss": 0.5822, "step": 174 },
    { "epoch": 0.3888888888888889, "grad_norm": 703213.8125, "learning_rate": 4.8395061728395067e-05, "loss": 0.6133, "step": 175 },
    { "epoch": 0.39111111111111113, "grad_norm": 939171.25, "learning_rate": 4.835390946502058e-05, "loss": 0.6379, "step": 176 },
    { "epoch": 0.3933333333333333, "grad_norm": 705173.375, "learning_rate": 4.831275720164609e-05, "loss": 0.6845, "step": 177 },
    { "epoch": 0.39555555555555555, "grad_norm": 491406.125, "learning_rate": 4.827160493827161e-05, "loss": 0.6156, "step": 178 },
    { "epoch": 0.3977777777777778, "grad_norm": 218537.390625, "learning_rate": 4.8230452674897116e-05, "loss": 0.6632, "step": 179 },
    { "epoch": 0.4, "grad_norm": 421861.8125, "learning_rate": 4.818930041152264e-05, "loss": 0.5522, "step": 180 },
    { "epoch": 0.4022222222222222, "grad_norm": 754662.4375, "learning_rate": 4.814814814814815e-05, "loss": 0.524, "step": 181 },
    { "epoch": 0.40444444444444444, "grad_norm": 599035.3125, "learning_rate": 4.8106995884773664e-05, "loss": 0.6015, "step": 182 },
    { "epoch": 0.4066666666666667, "grad_norm": 567925.3125, "learning_rate": 4.806584362139918e-05, "loss": 0.6269, "step": 183 },
    { "epoch": 0.4088888888888889, "grad_norm": 205344.453125, "learning_rate": 4.8024691358024696e-05, "loss": 0.5824, "step": 184 },
    { "epoch": 0.4111111111111111, "grad_norm": 363204.75, "learning_rate": 4.798353909465021e-05, "loss": 0.5746, "step": 185 },
    { "epoch": 0.41333333333333333, "grad_norm": 595512.25, "learning_rate": 4.794238683127572e-05, "loss": 0.5905, "step": 186 },
    { "epoch": 0.41555555555555557, "grad_norm": 768830.75, "learning_rate": 4.7901234567901237e-05, "loss": 0.6321, "step": 187 },
    { "epoch": 0.4177777777777778, "grad_norm": 711439.1875, "learning_rate": 4.7860082304526746e-05, "loss": 0.7406, "step": 188 },
    { "epoch": 0.42, "grad_norm": 2251242.25, "learning_rate": 4.781893004115227e-05, "loss": 0.6063, "step": 189 },
    { "epoch": 0.4222222222222222, "grad_norm": 581344.6875, "learning_rate": 4.7777777777777784e-05, "loss": 0.7016, "step": 190 },
    { "epoch": 0.42444444444444446, "grad_norm": 652845.25, "learning_rate": 4.773662551440329e-05, "loss": 0.6249, "step": 191 },
    { "epoch": 0.4266666666666667, "grad_norm": 450982.625, "learning_rate": 4.769547325102881e-05, "loss": 0.6877, "step": 192 },
    { "epoch": 0.4288888888888889, "grad_norm": 2374753.75, "learning_rate": 4.7654320987654325e-05, "loss": 0.6585, "step": 193 },
    { "epoch": 0.4311111111111111, "grad_norm": 1042813.6875, "learning_rate": 4.761316872427984e-05, "loss": 0.6447, "step": 194 },
    { "epoch": 0.43333333333333335, "grad_norm": 460134.09375, "learning_rate": 4.757201646090535e-05, "loss": 0.6245, "step": 195 },
    { "epoch": 0.43555555555555553, "grad_norm": 1168080.5, "learning_rate": 4.7530864197530866e-05, "loss": 0.6067, "step": 196 },
    { "epoch": 0.43777777777777777, "grad_norm": 760828.0625, "learning_rate": 4.748971193415638e-05, "loss": 0.5517, "step": 197 },
    { "epoch": 0.44, "grad_norm": 432214.28125, "learning_rate": 4.74485596707819e-05, "loss": 0.544, "step": 198 },
    { "epoch": 0.44222222222222224, "grad_norm": 1489343.0, "learning_rate": 4.740740740740741e-05, "loss": 0.4873, "step": 199 },
    { "epoch": 0.4444444444444444, "grad_norm": 848339.625, "learning_rate": 4.736625514403292e-05, "loss": 0.6345, "step": 200 },
    { "epoch": 0.44666666666666666, "grad_norm": 2258669.25, "learning_rate": 4.732510288065844e-05, "loss": 0.4431, "step": 201 },
    { "epoch": 0.4488888888888889, "grad_norm": 534571.0625, "learning_rate": 4.7283950617283954e-05, "loss": 0.5757, "step": 202 },
    { "epoch": 0.45111111111111113, "grad_norm": 1082905.875, "learning_rate": 4.724279835390947e-05, "loss": 0.6787, "step": 203 },
    { "epoch": 0.4533333333333333, "grad_norm": 1464140.0, "learning_rate": 4.7201646090534986e-05, "loss": 0.4967, "step": 204 },
    { "epoch": 0.45555555555555555, "grad_norm": 593933.625, "learning_rate": 4.7160493827160495e-05, "loss": 0.6246, "step": 205 },
    { "epoch": 0.4577777777777778, "grad_norm": 813640.75, "learning_rate": 4.711934156378601e-05, "loss": 0.47, "step": 206 },
    { "epoch": 0.46, "grad_norm": 418782.53125, "learning_rate": 4.7078189300411527e-05, "loss": 0.4363, "step": 207 },
    { "epoch": 0.4622222222222222, "grad_norm": 648853.5625, "learning_rate": 4.703703703703704e-05, "loss": 0.582, "step": 208 },
    { "epoch": 0.46444444444444444, "grad_norm": 803393.0, "learning_rate": 4.699588477366255e-05, "loss": 0.6732, "step": 209 },
    { "epoch": 0.4666666666666667, "grad_norm": 553974.375, "learning_rate": 4.695473251028807e-05, "loss": 0.527, "step": 210 },
    { "epoch": 0.4688888888888889, "grad_norm": 1254667.0, "learning_rate": 4.691358024691358e-05, "loss": 0.5885, "step": 211 },
    { "epoch": 0.4711111111111111, "grad_norm": 1502191.0, "learning_rate": 4.68724279835391e-05, "loss": 0.5172, "step": 212 },
    { "epoch": 0.47333333333333333, "grad_norm": 310824.96875, "learning_rate": 4.6831275720164615e-05, "loss": 0.6461, "step": 213 },
    { "epoch": 0.47555555555555556, "grad_norm": 461764.15625, "learning_rate": 4.6790123456790124e-05, "loss": 0.509, "step": 214 },
    { "epoch": 0.4777777777777778, "grad_norm": 702569.8125, "learning_rate": 4.674897119341564e-05, "loss": 0.5263, "step": 215 },
    { "epoch": 0.48, "grad_norm": 483824.375, "learning_rate": 4.6707818930041156e-05, "loss": 0.6557, "step": 216 },
    { "epoch": 0.4822222222222222, "grad_norm": 295729.90625, "learning_rate": 4.666666666666667e-05, "loss": 0.5004, "step": 217 },
    { "epoch": 0.48444444444444446, "grad_norm": 468939.78125, "learning_rate": 4.662551440329218e-05, "loss": 0.5232, "step": 218 },
    { "epoch": 0.4866666666666667, "grad_norm": 171850.140625, "learning_rate": 4.6584362139917697e-05, "loss": 0.6334, "step": 219 },
    { "epoch": 0.4888888888888889, "grad_norm": 1578345.875, "learning_rate": 4.654320987654321e-05, "loss": 0.7786, "step": 220 },
    { "epoch": 0.4911111111111111, "grad_norm": 1356533.75, "learning_rate": 4.650205761316873e-05, "loss": 0.5164, "step": 221 },
    { "epoch": 0.49333333333333335, "grad_norm": 634727.375, "learning_rate": 4.6460905349794244e-05, "loss": 0.4403, "step": 222 },
    { "epoch": 0.4955555555555556, "grad_norm": 239289.609375, "learning_rate": 4.641975308641975e-05, "loss": 0.6065, "step": 223 },
    { "epoch": 0.49777777777777776, "grad_norm": 240202.625, "learning_rate": 4.637860082304527e-05, "loss": 0.4874, "step": 224 },
    { "epoch": 0.5, "grad_norm": 1101113.75, "learning_rate": 4.6337448559670785e-05, "loss": 0.5963, "step": 225 },
    { "epoch": 0.5022222222222222, "grad_norm": 399643.0625, "learning_rate": 4.62962962962963e-05, "loss": 0.5306, "step": 226 },
    { "epoch": 0.5044444444444445, "grad_norm": 454625.84375, "learning_rate": 4.625514403292182e-05, "loss": 0.7048, "step": 227 },
    { "epoch": 0.5066666666666667, "grad_norm": 502732.28125, "learning_rate": 4.6213991769547326e-05, "loss": 0.5246, "step": 228 },
    { "epoch": 0.5088888888888888, "grad_norm": 302046.5625, "learning_rate": 4.617283950617284e-05, "loss": 0.6615, "step": 229 },
    { "epoch": 0.5111111111111111, "grad_norm": 1041901.5, "learning_rate": 4.613168724279836e-05, "loss": 0.7639, "step": 230 },
    { "epoch": 0.5133333333333333, "grad_norm": 464965.53125, "learning_rate": 4.609053497942387e-05, "loss": 0.6237, "step": 231 },
    { "epoch": 0.5155555555555555, "grad_norm": 751392.25, "learning_rate": 4.604938271604938e-05, "loss": 0.6681, "step": 232 },
    { "epoch": 0.5177777777777778, "grad_norm": 538912.8125, "learning_rate": 4.60082304526749e-05, "loss": 0.6773, "step": 233 },
    { "epoch": 0.52, "grad_norm": 598586.375, "learning_rate": 4.5967078189300414e-05, "loss": 0.6382, "step": 234 },
    { "epoch": 0.5222222222222223, "grad_norm": 525750.0, "learning_rate": 4.592592592592593e-05, "loss": 0.5312, "step": 235 },
    { "epoch": 0.5244444444444445, "grad_norm": 563147.6875, "learning_rate": 4.5884773662551446e-05, "loss": 0.5385, "step": 236 },
    { "epoch": 0.5266666666666666, "grad_norm": 1363912.125, "learning_rate": 4.5843621399176955e-05, "loss": 0.7997, "step": 237 },
    { "epoch": 0.5288888888888889, "grad_norm": 488355.625, "learning_rate": 4.580246913580247e-05, "loss": 0.5144, "step": 238 },
    { "epoch": 0.5311111111111111, "grad_norm": 680616.4375, "learning_rate": 4.5761316872427987e-05, "loss": 0.5847, "step": 239 },
    { "epoch": 0.5333333333333333, "grad_norm": 627066.8125, "learning_rate": 4.57201646090535e-05, "loss": 0.5937, "step": 240 },
    { "epoch": 0.5355555555555556, "grad_norm": 982689.1875, "learning_rate": 4.567901234567901e-05, "loss": 0.4186, "step": 241 },
    { "epoch": 0.5377777777777778, "grad_norm": 726951.875, "learning_rate": 4.563786008230453e-05, "loss": 0.6762, "step": 242 },
    { "epoch": 0.54, "grad_norm": 312047.375, "learning_rate": 4.559670781893004e-05, "loss": 0.6264, "step": 243 },
    { "epoch": 0.5422222222222223, "grad_norm": 786597.3125, "learning_rate": 4.555555555555556e-05, "loss": 0.9229, "step": 244 },
    { "epoch": 0.5444444444444444, "grad_norm": 967135.0625, "learning_rate": 4.5514403292181075e-05, "loss": 0.4451, "step": 245 },
    { "epoch": 0.5466666666666666, "grad_norm": 1016416.0, "learning_rate": 4.5473251028806584e-05, "loss": 0.6044, "step": 246 },
    { "epoch": 0.5488888888888889, "grad_norm": 175218.09375, "learning_rate": 4.54320987654321e-05, "loss": 0.6136, "step": 247 },
    { "epoch": 0.5511111111111111, "grad_norm": 803901.375, "learning_rate": 4.5390946502057616e-05, "loss": 0.6505, "step": 248 },
    { "epoch": 0.5533333333333333, "grad_norm": 352020.40625, "learning_rate": 4.534979423868313e-05, "loss": 0.4413, "step": 249 },
    { "epoch": 0.5555555555555556, "grad_norm": 2258271.25, "learning_rate": 4.530864197530865e-05, "loss": 0.4909, "step": 250 },
    { "epoch": 0.5577777777777778, "grad_norm": 378156.25, "learning_rate": 4.5267489711934157e-05, "loss": 0.6123, "step": 251 },
    { "epoch": 0.56, "grad_norm": 815982.0, "learning_rate": 4.522633744855967e-05, "loss": 0.6559, "step": 252 },
    { "epoch": 0.5622222222222222, "grad_norm": 2519179.75, "learning_rate": 4.518518518518519e-05, "loss": 0.5177, "step": 253 },
    { "epoch": 0.5644444444444444, "grad_norm": 806211.6875, "learning_rate": 4.5144032921810704e-05, "loss": 0.5855, "step": 254 },
    { "epoch": 0.5666666666666667, "grad_norm": 458326.0625, "learning_rate": 4.510288065843621e-05, "loss": 0.5142, "step": 255 },
    { "epoch": 0.5688888888888889, "grad_norm": 1957645.25, "learning_rate": 4.506172839506173e-05, "loss": 0.5814, "step": 256 },
    { "epoch": 0.5711111111111111, "grad_norm": 15964121.0, "learning_rate": 4.5020576131687245e-05, "loss": 0.7108, "step": 257 },
    { "epoch": 0.5733333333333334, "grad_norm": 703737.875, "learning_rate": 4.497942386831276e-05, "loss": 0.5115, "step": 258 },
    { "epoch": 0.5755555555555556, "grad_norm": 1874186.375, "learning_rate": 4.493827160493828e-05, "loss": 0.5556, "step": 259 },
    { "epoch": 0.5777777777777777, "grad_norm": 2454680.5, "learning_rate": 4.4897119341563786e-05, "loss": 0.5399, "step": 260 },
    { "epoch": 0.58, "grad_norm": 490995.4375, "learning_rate": 4.48559670781893e-05, "loss": 0.508, "step": 261 },
    { "epoch": 0.5822222222222222, "grad_norm": 935022.3125, "learning_rate": 4.481481481481482e-05, "loss": 0.5846, "step": 262 },
    { "epoch": 0.5844444444444444, "grad_norm": 1142683.0, "learning_rate": 4.477366255144033e-05, "loss": 0.7131, "step": 263 },
    { "epoch": 0.5866666666666667, "grad_norm": 422861.84375, "learning_rate": 4.473251028806584e-05, "loss": 0.7322, "step": 264 },
    { "epoch": 0.5888888888888889, "grad_norm": 572327.75, "learning_rate": 4.469135802469136e-05, "loss": 0.6128, "step": 265 },
    { "epoch": 0.5911111111111111, "grad_norm": 774320.0, "learning_rate": 4.4650205761316874e-05, "loss": 0.5271, "step": 266 },
    { "epoch": 0.5933333333333334, "grad_norm": 280896.9375, "learning_rate": 4.460905349794239e-05, "loss": 0.5551, "step": 267 },
    { "epoch": 0.5955555555555555, "grad_norm": 848952.5625, "learning_rate": 4.4567901234567906e-05, "loss": 0.673, "step": 268 },
    { "epoch": 0.5977777777777777, "grad_norm": 749778.4375, "learning_rate": 4.4526748971193415e-05, "loss": 0.6507, "step": 269 },
    { "epoch": 0.6, "grad_norm": 454624.96875, "learning_rate": 4.448559670781893e-05, "loss": 0.5128, "step": 270 },
    { "epoch": 0.6022222222222222, "grad_norm": 252433.75, "learning_rate": 4.4444444444444447e-05, "loss": 0.5532, "step": 271 },
    { "epoch": 0.6044444444444445, "grad_norm": 1700119.875, "learning_rate": 4.440329218106996e-05, "loss": 0.7207, "step": 272 },
    { "epoch": 0.6066666666666667, "grad_norm": 542159.4375, "learning_rate": 4.436213991769548e-05, "loss": 0.5605, "step": 273 },
    { "epoch": 0.6088888888888889, "grad_norm": 655691.8125, "learning_rate": 4.432098765432099e-05, "loss": 0.5493, "step": 274 },
    { "epoch": 0.6111111111111112, "grad_norm": 1386989.375, "learning_rate": 4.42798353909465e-05, "loss": 0.6047, "step": 275 },
    { "epoch": 0.6133333333333333, "grad_norm": 378723.8125, "learning_rate": 4.423868312757202e-05, "loss": 0.5983, "step": 276 },
    { "epoch": 0.6155555555555555, "grad_norm": 412323.6875, "learning_rate": 4.4197530864197535e-05, "loss": 0.4852, "step": 277 },
    { "epoch": 0.6177777777777778, "grad_norm": 949697.5625, "learning_rate": 4.4156378600823044e-05, "loss": 0.5116, "step": 278 },
    { "epoch": 0.62, "grad_norm": 142350.5625, "learning_rate": 4.411522633744856e-05, "loss": 0.5351, "step": 279 },
    { "epoch": 0.6222222222222222, "grad_norm": 180120.21875, "learning_rate": 4.4074074074074076e-05, "loss": 0.487, "step": 280 },
    { "epoch": 0.6244444444444445, "grad_norm": 806719.6875, "learning_rate": 4.403292181069959e-05, "loss": 0.5948, "step": 281 },
    { "epoch": 0.6266666666666667, "grad_norm": 637918.25, "learning_rate": 4.399176954732511e-05, "loss": 0.5571, "step": 282 },
    { "epoch": 0.6288888888888889, "grad_norm": 218954.140625, "learning_rate": 4.3950617283950617e-05, "loss": 0.5268, "step": 283 },
    { "epoch": 0.6311111111111111, "grad_norm": 265361.21875, "learning_rate": 4.390946502057613e-05, "loss": 0.6142, "step": 284 },
    { "epoch": 0.6333333333333333, "grad_norm": 841901.4375, "learning_rate": 4.386831275720165e-05, "loss": 0.5919, "step": 285 },
    { "epoch": 0.6355555555555555, "grad_norm": 945528.875, "learning_rate": 4.3827160493827164e-05, "loss": 0.5273, "step": 286 },
    { "epoch": 0.6377777777777778, "grad_norm": 1093268.5, "learning_rate": 4.378600823045268e-05, "loss": 0.4842, "step": 287 },
    { "epoch": 0.64, "grad_norm": 311890.375, "learning_rate": 4.374485596707819e-05, "loss": 0.5431, "step": 288 },
    { "epoch": 0.6422222222222222, "grad_norm": 668244.25, "learning_rate": 4.3703703703703705e-05, "loss": 0.5782, "step": 289 },
    { "epoch": 0.6444444444444445, "grad_norm": 199099.53125, "learning_rate": 4.366255144032922e-05, "loss": 0.6093, "step": 290 },
    { "epoch": 0.6466666666666666, "grad_norm": 646594.125, "learning_rate": 4.3621399176954737e-05, "loss": 0.5381, "step": 291 },
    { "epoch": 0.6488888888888888, "grad_norm": 801783.3125, "learning_rate": 4.3580246913580246e-05, "loss": 0.7125, "step": 292 },
    { "epoch": 0.6511111111111111, "grad_norm": 1490818.5, "learning_rate": 4.353909465020576e-05, "loss": 0.6342, "step": 293 },
    { "epoch": 0.6533333333333333, "grad_norm": 661538.0, "learning_rate": 4.349794238683128e-05, "loss": 0.5878, "step": 294 },
    { "epoch": 0.6555555555555556, "grad_norm": 872336.4375, "learning_rate": 4.345679012345679e-05, "loss": 0.477, "step": 295 },
    { "epoch": 0.6577777777777778, "grad_norm": 1284983.875, "learning_rate": 4.341563786008231e-05, "loss": 0.516, "step": 296 },
    { "epoch": 0.66, "grad_norm": 469390.59375, "learning_rate": 4.337448559670782e-05, "loss": 0.4943, "step": 297 },
    { "epoch": 0.6622222222222223, "grad_norm": 892792.9375, "learning_rate": 4.3333333333333334e-05, "loss": 0.4624, "step": 298 },
    { "epoch": 0.6644444444444444, "grad_norm": 571622.625, "learning_rate": 4.329218106995885e-05, "loss": 0.6525, "step": 299 },
    { "epoch": 0.6666666666666666, "grad_norm": 1495432.875, "learning_rate": 4.3251028806584366e-05, "loss": 0.4023, "step": 300 },
    { "epoch": 0.6688888888888889, "grad_norm": 758325.4375, "learning_rate": 4.3209876543209875e-05, "loss": 0.7577, "step": 301 },
    { "epoch": 0.6711111111111111, "grad_norm": 640554.1875, "learning_rate": 4.316872427983539e-05, "loss": 0.6508, "step": 302 },
    { "epoch": 0.6733333333333333, "grad_norm": 2041099.5, "learning_rate": 4.3127572016460907e-05, "loss": 0.6445, "step": 303 },
    { "epoch": 0.6755555555555556, "grad_norm": 278156.78125, "learning_rate": 4.308641975308642e-05, "loss": 0.6027, "step": 304 },
    { "epoch": 0.6777777777777778, "grad_norm": 1308681.5, "learning_rate": 4.304526748971194e-05, "loss": 0.4822, "step": 305 },
    { "epoch": 0.68, "grad_norm": 967817.8125, "learning_rate": 4.300411522633745e-05, "loss": 0.5359, "step": 306 },
    { "epoch": 0.6822222222222222, "grad_norm": 223118.21875, "learning_rate": 4.296296296296296e-05, "loss": 0.4957, "step": 307 },
    { "epoch": 0.6844444444444444, "grad_norm": 1438647.25, "learning_rate": 4.292181069958848e-05, "loss": 0.6234, "step": 308 },
    { "epoch": 0.6866666666666666, "grad_norm": 1326128.0, "learning_rate": 4.2880658436213995e-05, "loss": 0.5423, "step": 309 },
    { "epoch": 0.6888888888888889, "grad_norm": 419145.375, "learning_rate": 4.283950617283951e-05, "loss": 0.559, "step": 310 },
    { "epoch": 0.6911111111111111, "grad_norm": 538809.1875, "learning_rate": 4.279835390946502e-05, "loss": 0.5048, "step": 311 },
    { "epoch": 0.6933333333333334, "grad_norm": 308140.90625, "learning_rate": 4.2757201646090536e-05, "loss": 0.5409, "step": 312 },
    { "epoch": 0.6955555555555556, "grad_norm": 901349.375, "learning_rate": 4.271604938271605e-05, "loss": 0.6302, "step": 313 },
    { "epoch": 0.6977777777777778, "grad_norm": 825236.5, "learning_rate": 4.267489711934157e-05, "loss": 0.6041, "step": 314 },
    { "epoch": 0.7, "grad_norm": 550996.1875, "learning_rate": 4.2633744855967077e-05, "loss": 0.4659, "step": 315 },
    { "epoch": 0.7022222222222222, "grad_norm": 446906.25, "learning_rate": 4.259259259259259e-05, "loss": 0.4984, "step": 316 },
    { "epoch": 0.7044444444444444, "grad_norm": 653586.5625, "learning_rate": 4.255144032921811e-05, "loss": 0.6108, "step": 317 },
    { "epoch": 0.7066666666666667, "grad_norm": 1313717.75, "learning_rate": 4.2510288065843624e-05, "loss": 0.6092, "step": 318 },
    { "epoch": 0.7088888888888889, "grad_norm": 885470.5625, "learning_rate": 4.246913580246914e-05, "loss": 0.5388, "step": 319 },
    { "epoch": 0.7111111111111111, "grad_norm": 461374.0, "learning_rate": 4.242798353909465e-05, "loss": 0.4756, "step": 320 },
    { "epoch": 0.7133333333333334, "grad_norm": 203098.859375, "learning_rate": 4.2386831275720165e-05, "loss": 0.6083, "step": 321 },
    { "epoch": 0.7155555555555555, "grad_norm": 477118.0625, "learning_rate": 4.234567901234568e-05, "loss": 0.5137, "step": 322 },
    { "epoch": 0.7177777777777777, "grad_norm": 607326.5, "learning_rate": 4.2304526748971197e-05, "loss": 0.4768, "step": 323 },
    { "epoch": 0.72, "grad_norm": 254751.46875, "learning_rate": 4.2263374485596706e-05, "loss": 0.6813, "step": 324 },
    { "epoch": 0.7222222222222222, "grad_norm": 574567.0, "learning_rate": 4.222222222222222e-05, "loss": 0.5853, "step": 325 },
    { "epoch": 0.7244444444444444, "grad_norm": 2085169.875, "learning_rate": 4.2181069958847744e-05, "loss": 0.5218, "step": 326 },
    { "epoch": 0.7266666666666667, "grad_norm": 1202527.75, "learning_rate": 4.213991769547325e-05, "loss": 0.4943, "step": 327 },
    { "epoch": 0.7288888888888889, "grad_norm": 296164.25, "learning_rate": 4.209876543209877e-05, "loss": 0.5333, "step": 328 },
    { "epoch": 0.7311111111111112, "grad_norm": 275263.03125, "learning_rate": 4.205761316872428e-05, "loss": 0.4711, "step": 329 },
    { "epoch": 0.7333333333333333, "grad_norm": 973031.8125, "learning_rate": 4.2016460905349794e-05, "loss": 0.5963, "step": 330 },
    { "epoch": 0.7355555555555555, "grad_norm": 230538.0625, "learning_rate": 4.197530864197531e-05, "loss": 0.6437, "step": 331 },
    { "epoch": 0.7377777777777778, "grad_norm": 668760.5625, "learning_rate": 4.1934156378600826e-05, "loss": 0.5524, "step": 332 },
    { "epoch": 0.74, "grad_norm": 353451.75, "learning_rate": 4.189300411522634e-05, "loss": 0.498, "step": 333 },
    { "epoch": 0.7422222222222222, "grad_norm": 280192.4375, "learning_rate": 4.185185185185185e-05, "loss": 0.5159, "step": 334 },
    { "epoch": 0.7444444444444445, "grad_norm": 340414.5625, "learning_rate": 4.181069958847737e-05, "loss": 0.5469, "step": 335 },
    { "epoch": 0.7466666666666667, "grad_norm": 748978.5625, "learning_rate": 4.176954732510288e-05, "loss": 0.6063, "step": 336 },
    { "epoch": 0.7488888888888889, "grad_norm": 374095.09375, "learning_rate": 4.17283950617284e-05, "loss": 0.734, "step": 337 },
    { "epoch": 0.7511111111111111, "grad_norm": 163727.234375, "learning_rate": 4.168724279835391e-05, "loss": 0.4815, "step": 338 },
    { "epoch": 0.7533333333333333, "grad_norm": 336208.46875, "learning_rate": 4.164609053497942e-05, "loss": 0.6262, "step": 339 },
    { "epoch": 0.7555555555555555, "grad_norm": 606244.875, "learning_rate": 4.1604938271604946e-05, "loss": 0.583, "step": 340 },
    { "epoch": 0.7577777777777778, "grad_norm": 531861.75, "learning_rate": 4.1563786008230455e-05, "loss": 0.5549, "step": 341 },
    { "epoch": 0.76, "grad_norm": 489404.5625, "learning_rate": 4.152263374485597e-05, "loss": 0.5868, "step": 342 },
    { "epoch": 0.7622222222222222, "grad_norm": 770754.875, "learning_rate": 4.148148148148148e-05, "loss": 0.6607, "step": 343 },
    { "epoch": 0.7644444444444445, "grad_norm": 444973.46875, "learning_rate": 4.1440329218106996e-05, "loss": 0.7014, "step": 344 },
    { "epoch": 0.7666666666666667, "grad_norm": 828877.0625, "learning_rate": 4.139917695473251e-05, "loss": 0.4733, "step": 345 },
    { "epoch": 0.7688888888888888, "grad_norm": 2406807.25, "learning_rate": 4.135802469135803e-05, "loss": 0.6269, "step": 346 },
    { "epoch": 0.7711111111111111, "grad_norm": 1750097.375, "learning_rate": 4.1316872427983537e-05, "loss": 0.6198, "step": 347 },
    { "epoch": 0.7733333333333333, "grad_norm": 1762184.75, "learning_rate": 4.127572016460905e-05, "loss": 0.4977, "step": 348 },
    { "epoch": 0.7755555555555556, "grad_norm": 242493.71875, "learning_rate": 4.1234567901234575e-05, "loss": 0.5969, "step": 349 },
    { "epoch": 0.7777777777777778, "grad_norm": 3530379.75, "learning_rate": 4.1193415637860084e-05, "loss": 0.5063, "step": 350 },
    { "epoch": 0.78, "grad_norm": 532942.0625, "learning_rate": 4.11522633744856e-05, "loss": 0.4462, "step": 351 },
    { "epoch": 0.7822222222222223, "grad_norm": 2386480.5, "learning_rate": 4.111111111111111e-05, "loss": 0.5945, "step": 352 },
    { "epoch": 0.7844444444444445, "grad_norm": 207174.203125, "learning_rate": 4.1069958847736625e-05, "loss": 0.4863, "step": 353 },
    { "epoch": 0.7866666666666666, "grad_norm": 550860.6875, "learning_rate": 4.102880658436214e-05, "loss": 0.5111, "step": 354 },
    { "epoch": 0.7888888888888889, "grad_norm": 1387017.5, "learning_rate": 4.0987654320987657e-05, "loss": 0.5244, "step": 355 },
    { "epoch": 0.7911111111111111, "grad_norm": 443979.40625, "learning_rate": 4.094650205761317e-05, "loss": 0.5259, "step": 356 },
    { "epoch": 0.7933333333333333, "grad_norm": 167479.6875, "learning_rate": 4.090534979423868e-05, "loss": 0.5652, "step": 357 },
    { "epoch": 0.7955555555555556, "grad_norm": 268729.6875, "learning_rate": 4.0864197530864204e-05, "loss": 0.4428, "step": 358
| }, | |
| { | |
| "epoch": 0.7977777777777778, | |
| "grad_norm": 510909.34375, | |
| "learning_rate": 4.082304526748971e-05, | |
| "loss": 0.5002, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1276042.125, | |
| "learning_rate": 4.078189300411523e-05, | |
| "loss": 0.7983, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.8022222222222222, | |
| "grad_norm": 696396.8125, | |
| "learning_rate": 4.074074074074074e-05, | |
| "loss": 0.5355, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.8044444444444444, | |
| "grad_norm": 520619.625, | |
| "learning_rate": 4.0699588477366254e-05, | |
| "loss": 0.6058, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.8066666666666666, | |
| "grad_norm": 1366369.0, | |
| "learning_rate": 4.065843621399178e-05, | |
| "loss": 0.4865, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.8088888888888889, | |
| "grad_norm": 906512.5, | |
| "learning_rate": 4.0617283950617286e-05, | |
| "loss": 0.6686, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.8111111111111111, | |
| "grad_norm": 498396.90625, | |
| "learning_rate": 4.05761316872428e-05, | |
| "loss": 0.5721, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.8133333333333334, | |
| "grad_norm": 506303.21875, | |
| "learning_rate": 4.053497942386831e-05, | |
| "loss": 0.5422, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.8155555555555556, | |
| "grad_norm": 8037728.0, | |
| "learning_rate": 4.049382716049383e-05, | |
| "loss": 0.6566, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.8177777777777778, | |
| "grad_norm": 936880.9375, | |
| "learning_rate": 4.045267489711934e-05, | |
| "loss": 0.4807, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1358766.25, | |
| "learning_rate": 4.041152263374486e-05, | |
| "loss": 0.4736, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.8222222222222222, | |
| "grad_norm": 1406607.5, | |
| "learning_rate": 4.0370370370370374e-05, | |
| "loss": 0.4751, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.8244444444444444, | |
| "grad_norm": 832850.125, | |
| "learning_rate": 4.032921810699588e-05, | |
| "loss": 0.5952, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.8266666666666667, | |
| "grad_norm": 500190.8125, | |
| "learning_rate": 4.0288065843621406e-05, | |
| "loss": 0.4909, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.8288888888888889, | |
| "grad_norm": 193279.265625, | |
| "learning_rate": 4.0246913580246915e-05, | |
| "loss": 0.4518, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.8311111111111111, | |
| "grad_norm": 1528230.625, | |
| "learning_rate": 4.020576131687243e-05, | |
| "loss": 0.5747, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.8333333333333334, | |
| "grad_norm": 228102.15625, | |
| "learning_rate": 4.016460905349794e-05, | |
| "loss": 0.481, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.8355555555555556, | |
| "grad_norm": 472035.78125, | |
| "learning_rate": 4.012345679012346e-05, | |
| "loss": 0.4831, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.8377777777777777, | |
| "grad_norm": 546926.125, | |
| "learning_rate": 4.008230452674897e-05, | |
| "loss": 0.4395, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 949932.875, | |
| "learning_rate": 4.004115226337449e-05, | |
| "loss": 0.5096, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.8422222222222222, | |
| "grad_norm": 388450.03125, | |
| "learning_rate": 4e-05, | |
| "loss": 0.5798, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.8444444444444444, | |
| "grad_norm": 407452.59375, | |
| "learning_rate": 3.995884773662551e-05, | |
| "loss": 0.6373, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.8466666666666667, | |
| "grad_norm": 781730.5625, | |
| "learning_rate": 3.9917695473251035e-05, | |
| "loss": 0.5271, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.8488888888888889, | |
| "grad_norm": 360056.3125, | |
| "learning_rate": 3.9876543209876544e-05, | |
| "loss": 0.54, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.8511111111111112, | |
| "grad_norm": 557841.125, | |
| "learning_rate": 3.983539094650206e-05, | |
| "loss": 0.6939, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.8533333333333334, | |
| "grad_norm": 1382806.125, | |
| "learning_rate": 3.979423868312757e-05, | |
| "loss": 0.6029, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.8555555555555555, | |
| "grad_norm": 655178.8125, | |
| "learning_rate": 3.975308641975309e-05, | |
| "loss": 0.3791, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.8577777777777778, | |
| "grad_norm": 265361.96875, | |
| "learning_rate": 3.971193415637861e-05, | |
| "loss": 0.4686, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 424605.125, | |
| "learning_rate": 3.9670781893004117e-05, | |
| "loss": 0.6816, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.8622222222222222, | |
| "grad_norm": 542365.8125, | |
| "learning_rate": 3.962962962962963e-05, | |
| "loss": 0.6044, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.8644444444444445, | |
| "grad_norm": 338897.46875, | |
| "learning_rate": 3.958847736625514e-05, | |
| "loss": 0.569, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.8666666666666667, | |
| "grad_norm": 1602508.375, | |
| "learning_rate": 3.9547325102880664e-05, | |
| "loss": 0.618, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.8688888888888889, | |
| "grad_norm": 181145.4375, | |
| "learning_rate": 3.950617283950617e-05, | |
| "loss": 0.5979, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.8711111111111111, | |
| "grad_norm": 704965.625, | |
| "learning_rate": 3.946502057613169e-05, | |
| "loss": 0.4592, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.8733333333333333, | |
| "grad_norm": 2342850.25, | |
| "learning_rate": 3.9423868312757205e-05, | |
| "loss": 0.4666, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.8755555555555555, | |
| "grad_norm": 429568.09375, | |
| "learning_rate": 3.938271604938272e-05, | |
| "loss": 0.5621, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.8777777777777778, | |
| "grad_norm": 787485.5, | |
| "learning_rate": 3.934156378600824e-05, | |
| "loss": 0.4794, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 4468053.0, | |
| "learning_rate": 3.9300411522633746e-05, | |
| "loss": 0.4851, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.8822222222222222, | |
| "grad_norm": 1580367.0, | |
| "learning_rate": 3.925925925925926e-05, | |
| "loss": 0.5972, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.8844444444444445, | |
| "grad_norm": 388921.8125, | |
| "learning_rate": 3.921810699588477e-05, | |
| "loss": 0.6099, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.8866666666666667, | |
| "grad_norm": 617553.5625, | |
| "learning_rate": 3.917695473251029e-05, | |
| "loss": 0.5251, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.8888888888888888, | |
| "grad_norm": 429902.09375, | |
| "learning_rate": 3.91358024691358e-05, | |
| "loss": 0.5187, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.8911111111111111, | |
| "grad_norm": 225324.890625, | |
| "learning_rate": 3.909465020576132e-05, | |
| "loss": 0.3663, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.8933333333333333, | |
| "grad_norm": 1841588.5, | |
| "learning_rate": 3.9053497942386834e-05, | |
| "loss": 0.5766, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.8955555555555555, | |
| "grad_norm": 1267175.125, | |
| "learning_rate": 3.901234567901234e-05, | |
| "loss": 0.5858, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.8977777777777778, | |
| "grad_norm": 716295.0, | |
| "learning_rate": 3.8971193415637866e-05, | |
| "loss": 0.4709, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 2756277.25, | |
| "learning_rate": 3.8930041152263375e-05, | |
| "loss": 0.681, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.9022222222222223, | |
| "grad_norm": 1748439.125, | |
| "learning_rate": 3.888888888888889e-05, | |
| "loss": 0.6071, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.9044444444444445, | |
| "grad_norm": 610785.875, | |
| "learning_rate": 3.88477366255144e-05, | |
| "loss": 0.4121, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.9066666666666666, | |
| "grad_norm": 527703.75, | |
| "learning_rate": 3.880658436213992e-05, | |
| "loss": 0.6429, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.9088888888888889, | |
| "grad_norm": 385713.40625, | |
| "learning_rate": 3.876543209876544e-05, | |
| "loss": 0.5652, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.9111111111111111, | |
| "grad_norm": 403583.625, | |
| "learning_rate": 3.872427983539095e-05, | |
| "loss": 0.623, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.9133333333333333, | |
| "grad_norm": 399882.75, | |
| "learning_rate": 3.868312757201646e-05, | |
| "loss": 0.5394, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.9155555555555556, | |
| "grad_norm": 312631.34375, | |
| "learning_rate": 3.864197530864197e-05, | |
| "loss": 0.566, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.9177777777777778, | |
| "grad_norm": 184664.34375, | |
| "learning_rate": 3.8600823045267495e-05, | |
| "loss": 0.3998, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 885300.0625, | |
| "learning_rate": 3.8559670781893004e-05, | |
| "loss": 0.6295, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.9222222222222223, | |
| "grad_norm": 157462.03125, | |
| "learning_rate": 3.851851851851852e-05, | |
| "loss": 0.6012, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.9244444444444444, | |
| "grad_norm": 298935.9375, | |
| "learning_rate": 3.8477366255144036e-05, | |
| "loss": 0.5729, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.9266666666666666, | |
| "grad_norm": 290948.75, | |
| "learning_rate": 3.843621399176955e-05, | |
| "loss": 0.4952, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.9288888888888889, | |
| "grad_norm": 1232251.875, | |
| "learning_rate": 3.839506172839507e-05, | |
| "loss": 0.4999, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.9311111111111111, | |
| "grad_norm": 441190.5, | |
| "learning_rate": 3.8353909465020577e-05, | |
| "loss": 0.4751, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.9333333333333333, | |
| "grad_norm": 420174.40625, | |
| "learning_rate": 3.831275720164609e-05, | |
| "loss": 0.569, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.9355555555555556, | |
| "grad_norm": 794650.375, | |
| "learning_rate": 3.82716049382716e-05, | |
| "loss": 0.5852, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.9377777777777778, | |
| "grad_norm": 436417.28125, | |
| "learning_rate": 3.8230452674897124e-05, | |
| "loss": 0.5244, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 390293.96875, | |
| "learning_rate": 3.818930041152264e-05, | |
| "loss": 0.6959, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.9422222222222222, | |
| "grad_norm": 562045.5, | |
| "learning_rate": 3.814814814814815e-05, | |
| "loss": 0.4438, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.9444444444444444, | |
| "grad_norm": 345788.0625, | |
| "learning_rate": 3.8106995884773665e-05, | |
| "loss": 0.5413, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.9466666666666667, | |
| "grad_norm": 708525.6875, | |
| "learning_rate": 3.806584362139918e-05, | |
| "loss": 0.5812, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.9488888888888889, | |
| "grad_norm": 319743.25, | |
| "learning_rate": 3.80246913580247e-05, | |
| "loss": 0.4005, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.9511111111111111, | |
| "grad_norm": 669220.0625, | |
| "learning_rate": 3.7983539094650206e-05, | |
| "loss": 0.565, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.9533333333333334, | |
| "grad_norm": 683483.625, | |
| "learning_rate": 3.794238683127572e-05, | |
| "loss": 0.5445, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.9555555555555556, | |
| "grad_norm": 1566102.0, | |
| "learning_rate": 3.790123456790123e-05, | |
| "loss": 0.45, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.9577777777777777, | |
| "grad_norm": 399423.53125, | |
| "learning_rate": 3.786008230452675e-05, | |
| "loss": 0.4618, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 536128.0625, | |
| "learning_rate": 3.781893004115227e-05, | |
| "loss": 0.6237, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.9622222222222222, | |
| "grad_norm": 632462.3125, | |
| "learning_rate": 3.777777777777778e-05, | |
| "loss": 0.7016, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.9644444444444444, | |
| "grad_norm": 1254881.0, | |
| "learning_rate": 3.7736625514403294e-05, | |
| "loss": 0.649, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.9666666666666667, | |
| "grad_norm": 2207048.75, | |
| "learning_rate": 3.769547325102881e-05, | |
| "loss": 0.8038, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.9688888888888889, | |
| "grad_norm": 2232515.5, | |
| "learning_rate": 3.7654320987654326e-05, | |
| "loss": 0.534, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.9711111111111111, | |
| "grad_norm": 425982.53125, | |
| "learning_rate": 3.7613168724279835e-05, | |
| "loss": 0.5403, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.9733333333333334, | |
| "grad_norm": 577288.75, | |
| "learning_rate": 3.757201646090535e-05, | |
| "loss": 0.581, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.9755555555555555, | |
| "grad_norm": 320666.09375, | |
| "learning_rate": 3.7530864197530867e-05, | |
| "loss": 0.4917, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.9777777777777777, | |
| "grad_norm": 455171.28125, | |
| "learning_rate": 3.748971193415638e-05, | |
| "loss": 0.639, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 349861.1875, | |
| "learning_rate": 3.74485596707819e-05, | |
| "loss": 0.4139, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.9822222222222222, | |
| "grad_norm": 1753616.125, | |
| "learning_rate": 3.740740740740741e-05, | |
| "loss": 0.6111, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.9844444444444445, | |
| "grad_norm": 380102.625, | |
| "learning_rate": 3.736625514403292e-05, | |
| "loss": 0.532, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.9866666666666667, | |
| "grad_norm": 227778.578125, | |
| "learning_rate": 3.732510288065844e-05, | |
| "loss": 0.5705, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.9888888888888889, | |
| "grad_norm": 523431.46875, | |
| "learning_rate": 3.7283950617283955e-05, | |
| "loss": 0.5337, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.9911111111111112, | |
| "grad_norm": 449052.21875, | |
| "learning_rate": 3.724279835390947e-05, | |
| "loss": 0.5723, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.9933333333333333, | |
| "grad_norm": 1037056.75, | |
| "learning_rate": 3.720164609053498e-05, | |
| "loss": 0.6206, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.9955555555555555, | |
| "grad_norm": 746349.1875, | |
| "learning_rate": 3.7160493827160496e-05, | |
| "loss": 0.433, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.9977777777777778, | |
| "grad_norm": 247129.5, | |
| "learning_rate": 3.711934156378601e-05, | |
| "loss": 0.5245, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 473643.53125, | |
| "learning_rate": 3.707818930041153e-05, | |
| "loss": 0.4803, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.0022222222222221, | |
| "grad_norm": 3388953.25, | |
| "learning_rate": 3.7037037037037037e-05, | |
| "loss": 0.5192, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.0044444444444445, | |
| "grad_norm": 352336.65625, | |
| "learning_rate": 3.699588477366255e-05, | |
| "loss": 0.5511, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.0066666666666666, | |
| "grad_norm": 537367.0, | |
| "learning_rate": 3.695473251028807e-05, | |
| "loss": 0.4823, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.008888888888889, | |
| "grad_norm": 463618.0625, | |
| "learning_rate": 3.6913580246913584e-05, | |
| "loss": 0.6219, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.011111111111111, | |
| "grad_norm": 890988.0, | |
| "learning_rate": 3.68724279835391e-05, | |
| "loss": 0.6637, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.0133333333333334, | |
| "grad_norm": 834668.8125, | |
| "learning_rate": 3.683127572016461e-05, | |
| "loss": 0.5514, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.0155555555555555, | |
| "grad_norm": 478754.59375, | |
| "learning_rate": 3.6790123456790125e-05, | |
| "loss": 0.4442, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.0177777777777777, | |
| "grad_norm": 364720.5, | |
| "learning_rate": 3.674897119341564e-05, | |
| "loss": 0.5295, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "grad_norm": 437859.78125, | |
| "learning_rate": 3.670781893004116e-05, | |
| "loss": 0.5602, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.0222222222222221, | |
| "grad_norm": 668818.375, | |
| "learning_rate": 3.6666666666666666e-05, | |
| "loss": 0.6702, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.0244444444444445, | |
| "grad_norm": 1322955.875, | |
| "learning_rate": 3.662551440329218e-05, | |
| "loss": 0.5504, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.0266666666666666, | |
| "grad_norm": 912058.6875, | |
| "learning_rate": 3.65843621399177e-05, | |
| "loss": 0.485, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.028888888888889, | |
| "grad_norm": 1165694.75, | |
| "learning_rate": 3.654320987654321e-05, | |
| "loss": 0.5364, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.031111111111111, | |
| "grad_norm": 410742.59375, | |
| "learning_rate": 3.650205761316873e-05, | |
| "loss": 0.642, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.0333333333333334, | |
| "grad_norm": 517038.625, | |
| "learning_rate": 3.646090534979424e-05, | |
| "loss": 0.6515, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.0355555555555556, | |
| "grad_norm": 642495.5, | |
| "learning_rate": 3.6419753086419754e-05, | |
| "loss": 0.5096, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.0377777777777777, | |
| "grad_norm": 757100.125, | |
| "learning_rate": 3.637860082304527e-05, | |
| "loss": 0.6411, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "grad_norm": 865382.5, | |
| "learning_rate": 3.6337448559670786e-05, | |
| "loss": 0.5356, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.0422222222222222, | |
| "grad_norm": 2302121.75, | |
| "learning_rate": 3.62962962962963e-05, | |
| "loss": 0.6145, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.0444444444444445, | |
| "grad_norm": 410693.78125, | |
| "learning_rate": 3.625514403292181e-05, | |
| "loss": 0.4845, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.0466666666666666, | |
| "grad_norm": 343380.8125, | |
| "learning_rate": 3.6213991769547327e-05, | |
| "loss": 0.5627, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.048888888888889, | |
| "grad_norm": 811735.8125, | |
| "learning_rate": 3.617283950617284e-05, | |
| "loss": 0.5982, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.051111111111111, | |
| "grad_norm": 898176.5, | |
| "learning_rate": 3.613168724279836e-05, | |
| "loss": 0.5189, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.0533333333333332, | |
| "grad_norm": 257329.40625, | |
| "learning_rate": 3.609053497942387e-05, | |
| "loss": 0.5023, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.0555555555555556, | |
| "grad_norm": 262981.125, | |
| "learning_rate": 3.604938271604938e-05, | |
| "loss": 0.5989, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.0577777777777777, | |
| "grad_norm": 284406.6875, | |
| "learning_rate": 3.60082304526749e-05, | |
| "loss": 0.5692, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "grad_norm": 303435.46875, | |
| "learning_rate": 3.5967078189300415e-05, | |
| "loss": 0.4557, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.0622222222222222, | |
| "grad_norm": 512383.0625, | |
| "learning_rate": 3.592592592592593e-05, | |
| "loss": 0.4438, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.0644444444444445, | |
| "grad_norm": 362393.125, | |
| "learning_rate": 3.588477366255144e-05, | |
| "loss": 0.5733, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.0666666666666667, | |
| "grad_norm": 419564.6875, | |
| "learning_rate": 3.5843621399176956e-05, | |
| "loss": 0.5342, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.068888888888889, | |
| "grad_norm": 937582.8125, | |
| "learning_rate": 3.580246913580247e-05, | |
| "loss": 0.5937, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.0711111111111111, | |
| "grad_norm": 617804.5625, | |
| "learning_rate": 3.576131687242799e-05, | |
| "loss": 0.5317, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.0733333333333333, | |
| "grad_norm": 1090921.375, | |
| "learning_rate": 3.5720164609053497e-05, | |
| "loss": 0.5436, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.0755555555555556, | |
| "grad_norm": 393805.53125, | |
| "learning_rate": 3.567901234567901e-05, | |
| "loss": 0.6203, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.0777777777777777, | |
| "grad_norm": 401942.1875, | |
| "learning_rate": 3.563786008230453e-05, | |
| "loss": 0.5145, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "grad_norm": 426967.34375, | |
| "learning_rate": 3.5596707818930044e-05, | |
| "loss": 0.5899, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.0822222222222222, | |
| "grad_norm": 1499625.25, | |
| "learning_rate": 3.555555555555556e-05, | |
| "loss": 0.5185, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.0844444444444445, | |
| "grad_norm": 119777.875, | |
| "learning_rate": 3.551440329218107e-05, | |
| "loss": 0.4702, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.0866666666666667, | |
| "grad_norm": 325876.65625, | |
| "learning_rate": 3.5473251028806585e-05, | |
| "loss": 0.4839, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.0888888888888888, | |
| "grad_norm": 1764555.875, | |
| "learning_rate": 3.54320987654321e-05, | |
| "loss": 0.4194, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.0911111111111111, | |
| "grad_norm": 992762.625, | |
| "learning_rate": 3.539094650205762e-05, | |
| "loss": 0.6011, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.0933333333333333, | |
| "grad_norm": 1109132.0, | |
| "learning_rate": 3.534979423868313e-05, | |
| "loss": 0.578, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.0955555555555556, | |
| "grad_norm": 338012.0625, | |
| "learning_rate": 3.530864197530864e-05, | |
| "loss": 0.6753, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.0977777777777777, | |
| "grad_norm": 489635.125, | |
| "learning_rate": 3.526748971193416e-05, | |
| "loss": 0.484, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "grad_norm": 467980.4375, | |
| "learning_rate": 3.522633744855967e-05, | |
| "loss": 0.5041, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.1022222222222222, | |
| "grad_norm": 926030.25, | |
| "learning_rate": 3.518518518518519e-05, | |
| "loss": 0.6303, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.1044444444444443, | |
| "grad_norm": 514718.71875, | |
| "learning_rate": 3.51440329218107e-05, | |
| "loss": 0.5022, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.1066666666666667, | |
| "grad_norm": 1032987.9375, | |
| "learning_rate": 3.5102880658436214e-05, | |
| "loss": 0.5416, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.1088888888888888, | |
| "grad_norm": 743004.375, | |
| "learning_rate": 3.506172839506173e-05, | |
| "loss": 0.4624, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.1111111111111112, | |
| "grad_norm": 542330.0625, | |
| "learning_rate": 3.5020576131687246e-05, | |
| "loss": 0.6888, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.1133333333333333, | |
| "grad_norm": 956336.0, | |
| "learning_rate": 3.497942386831276e-05, | |
| "loss": 0.4579, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.1155555555555556, | |
| "grad_norm": 445892.0625, | |
| "learning_rate": 3.493827160493827e-05, | |
| "loss": 0.5064, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.1177777777777778, | |
| "grad_norm": 200526.984375, | |
| "learning_rate": 3.4897119341563787e-05, | |
| "loss": 0.4721, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "grad_norm": 2516397.75, | |
| "learning_rate": 3.48559670781893e-05, | |
| "loss": 0.5609, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.1222222222222222, | |
| "grad_norm": 561231.625, | |
| "learning_rate": 3.481481481481482e-05, | |
| "loss": 0.5566, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.1244444444444444, | |
| "grad_norm": 1745718.375, | |
| "learning_rate": 3.4773662551440334e-05, | |
| "loss": 0.5487, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.1266666666666667, | |
| "grad_norm": 233383.96875, | |
| "learning_rate": 3.473251028806584e-05, | |
| "loss": 0.5369, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.1288888888888888, | |
| "grad_norm": 681536.8125, | |
| "learning_rate": 3.469135802469136e-05, | |
| "loss": 0.6202, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.1311111111111112, | |
| "grad_norm": 460907.3125, | |
| "learning_rate": 3.4650205761316875e-05, | |
| "loss": 0.6093, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.1333333333333333, | |
| "grad_norm": 4109765.0, | |
| "learning_rate": 3.460905349794239e-05, | |
| "loss": 0.678, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.1355555555555557, | |
| "grad_norm": 306640.21875, | |
| "learning_rate": 3.45679012345679e-05, | |
| "loss": 0.5777, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.1377777777777778, | |
| "grad_norm": 1322950.75, | |
| "learning_rate": 3.4526748971193416e-05, | |
| "loss": 0.5124, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.1400000000000001, | |
| "grad_norm": 820002.3125, | |
| "learning_rate": 3.448559670781893e-05, | |
| "loss": 0.5038, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.1422222222222222, | |
| "grad_norm": 1089420.5, | |
| "learning_rate": 3.444444444444445e-05, | |
| "loss": 0.512, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.1444444444444444, | |
| "grad_norm": 954156.25, | |
| "learning_rate": 3.440329218106996e-05, | |
| "loss": 0.5496, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.1466666666666667, | |
| "grad_norm": 667740.8125, | |
| "learning_rate": 3.436213991769547e-05, | |
| "loss": 0.5691, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.1488888888888888, | |
| "grad_norm": 1452467.375, | |
| "learning_rate": 3.432098765432099e-05, | |
| "loss": 0.4714, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.1511111111111112, | |
| "grad_norm": 875589.75, | |
| "learning_rate": 3.4279835390946504e-05, | |
| "loss": 0.535, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.1533333333333333, | |
| "grad_norm": 127607.7421875, | |
| "learning_rate": 3.423868312757202e-05, | |
| "loss": 0.4445, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.1555555555555554, | |
| "grad_norm": 533242.5, | |
| "learning_rate": 3.419753086419753e-05, | |
| "loss": 0.5453, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.1577777777777778, | |
| "grad_norm": 1075770.25, | |
| "learning_rate": 3.4156378600823045e-05, | |
| "loss": 0.5702, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "grad_norm": 364981.03125, | |
| "learning_rate": 3.411522633744856e-05, | |
| "loss": 0.5414, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.1622222222222223, | |
| "grad_norm": 588666.875, | |
| "learning_rate": 3.4074074074074077e-05, | |
| "loss": 0.5894, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.1644444444444444, | |
| "grad_norm": 2711980.25, | |
| "learning_rate": 3.403292181069959e-05, | |
| "loss": 0.5031, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.1666666666666667, | |
| "grad_norm": 2523403.0, | |
| "learning_rate": 3.39917695473251e-05, | |
| "loss": 0.6124, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.1688888888888889, | |
| "grad_norm": 190597.90625, | |
| "learning_rate": 3.395061728395062e-05, | |
| "loss": 0.5361, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.1711111111111112, | |
| "grad_norm": 103686.34375, | |
| "learning_rate": 3.390946502057613e-05, | |
| "loss": 0.5721, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.1733333333333333, | |
| "grad_norm": 684715.5625, | |
| "learning_rate": 3.386831275720165e-05, | |
| "loss": 0.5941, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.1755555555555555, | |
| "grad_norm": 643524.8125, | |
| "learning_rate": 3.3827160493827165e-05, | |
| "loss": 0.5221, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.1777777777777778, | |
| "grad_norm": 951965.1875, | |
| "learning_rate": 3.3786008230452674e-05, | |
| "loss": 0.6495, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "grad_norm": 1729085.0, | |
| "learning_rate": 3.374485596707819e-05, | |
| "loss": 0.4682, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.1822222222222223, | |
| "grad_norm": 355360.1875, | |
| "learning_rate": 3.3703703703703706e-05, | |
| "loss": 0.6197, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.1844444444444444, | |
| "grad_norm": 496380.0625, | |
| "learning_rate": 3.366255144032922e-05, | |
| "loss": 0.444, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.1866666666666668, | |
| "grad_norm": 409551.0, | |
| "learning_rate": 3.362139917695473e-05, | |
| "loss": 0.5369, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.1888888888888889, | |
| "grad_norm": 392581.9375, | |
| "learning_rate": 3.3580246913580247e-05, | |
| "loss": 0.7495, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.1911111111111112, | |
| "grad_norm": 881165.0, | |
| "learning_rate": 3.353909465020576e-05, | |
| "loss": 0.5686, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.1933333333333334, | |
| "grad_norm": 1053088.625, | |
| "learning_rate": 3.349794238683128e-05, | |
| "loss": 0.4673, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.1955555555555555, | |
| "grad_norm": 1257165.0, | |
| "learning_rate": 3.3456790123456794e-05, | |
| "loss": 0.4615, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.1977777777777778, | |
| "grad_norm": 2032428.375, | |
| "learning_rate": 3.34156378600823e-05, | |
| "loss": 0.5234, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "grad_norm": 397717.59375, | |
| "learning_rate": 3.337448559670782e-05, | |
| "loss": 0.6048, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.2022222222222223, | |
| "grad_norm": 470372.0625, | |
| "learning_rate": 3.3333333333333335e-05, | |
| "loss": 0.5376, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.2044444444444444, | |
| "grad_norm": 340338.21875, | |
| "learning_rate": 3.329218106995885e-05, | |
| "loss": 0.5824, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.2066666666666666, | |
| "grad_norm": 1100316.0, | |
| "learning_rate": 3.325102880658436e-05, | |
| "loss": 0.6534, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.208888888888889, | |
| "grad_norm": 586183.625, | |
| "learning_rate": 3.3209876543209876e-05, | |
| "loss": 0.5192, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.211111111111111, | |
| "grad_norm": 187557.046875, | |
| "learning_rate": 3.316872427983539e-05, | |
| "loss": 0.5388, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.2133333333333334, | |
| "grad_norm": 821299.6875, | |
| "learning_rate": 3.312757201646091e-05, | |
| "loss": 0.6922, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.2155555555555555, | |
| "grad_norm": 202463.78125, | |
| "learning_rate": 3.308641975308642e-05, | |
| "loss": 0.4871, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.2177777777777778, | |
| "grad_norm": 827578.6875, | |
| "learning_rate": 3.304526748971193e-05, | |
| "loss": 0.6096, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "grad_norm": 1085792.125, | |
| "learning_rate": 3.300411522633745e-05, | |
| "loss": 0.539, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.2222222222222223, | |
| "grad_norm": 313487.625, | |
| "learning_rate": 3.2962962962962964e-05, | |
| "loss": 0.4972, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.2244444444444444, | |
| "grad_norm": 1085682.25, | |
| "learning_rate": 3.292181069958848e-05, | |
| "loss": 0.4936, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.2266666666666666, | |
| "grad_norm": 796879.1875, | |
| "learning_rate": 3.2880658436213996e-05, | |
| "loss": 0.5272, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.228888888888889, | |
| "grad_norm": 1149724.125, | |
| "learning_rate": 3.2839506172839505e-05, | |
| "loss": 0.4691, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.231111111111111, | |
| "grad_norm": 887672.6875, | |
| "learning_rate": 3.279835390946502e-05, | |
| "loss": 0.5061, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.2333333333333334, | |
| "grad_norm": 1085029.75, | |
| "learning_rate": 3.2757201646090537e-05, | |
| "loss": 0.6217, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.2355555555555555, | |
| "grad_norm": 1328316.75, | |
| "learning_rate": 3.271604938271605e-05, | |
| "loss": 0.5258, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.2377777777777779, | |
| "grad_norm": 478540.59375, | |
| "learning_rate": 3.267489711934156e-05, | |
| "loss": 0.5077, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "grad_norm": 516354.71875, | |
| "learning_rate": 3.263374485596708e-05, | |
| "loss": 0.596, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.2422222222222223, | |
| "grad_norm": 654598.375, | |
| "learning_rate": 3.25925925925926e-05, | |
| "loss": 0.5235, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.2444444444444445, | |
| "grad_norm": 277117.6875, | |
| "learning_rate": 3.255144032921811e-05, | |
| "loss": 0.4945, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.2466666666666666, | |
| "grad_norm": 1819261.25, | |
| "learning_rate": 3.2510288065843625e-05, | |
| "loss": 0.6248, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.248888888888889, | |
| "grad_norm": 691423.875, | |
| "learning_rate": 3.2469135802469134e-05, | |
| "loss": 0.4982, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.251111111111111, | |
| "grad_norm": 409888.90625, | |
| "learning_rate": 3.242798353909465e-05, | |
| "loss": 0.5005, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.2533333333333334, | |
| "grad_norm": 337259.59375, | |
| "learning_rate": 3.2386831275720166e-05, | |
| "loss": 0.4968, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.2555555555555555, | |
| "grad_norm": 465535.34375, | |
| "learning_rate": 3.234567901234568e-05, | |
| "loss": 0.6587, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.2577777777777777, | |
| "grad_norm": 1500356.25, | |
| "learning_rate": 3.230452674897119e-05, | |
| "loss": 0.5014, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "grad_norm": 209156.453125, | |
| "learning_rate": 3.2263374485596707e-05, | |
| "loss": 0.4308, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.2622222222222224, | |
| "grad_norm": 329407.0625, | |
| "learning_rate": 3.222222222222223e-05, | |
| "loss": 0.65, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.2644444444444445, | |
| "grad_norm": 944331.375, | |
| "learning_rate": 3.218106995884774e-05, | |
| "loss": 0.5737, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.2666666666666666, | |
| "grad_norm": 1461638.5, | |
| "learning_rate": 3.2139917695473254e-05, | |
| "loss": 0.5372, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.268888888888889, | |
| "grad_norm": 629504.4375, | |
| "learning_rate": 3.209876543209876e-05, | |
| "loss": 0.6163, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.271111111111111, | |
| "grad_norm": 1049412.5, | |
| "learning_rate": 3.205761316872428e-05, | |
| "loss": 0.5535, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.2733333333333334, | |
| "grad_norm": 238209.34375, | |
| "learning_rate": 3.2016460905349795e-05, | |
| "loss": 0.4736, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.2755555555555556, | |
| "grad_norm": 1106967.625, | |
| "learning_rate": 3.197530864197531e-05, | |
| "loss": 0.5857, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.2777777777777777, | |
| "grad_norm": 258857.421875, | |
| "learning_rate": 3.193415637860083e-05, | |
| "loss": 0.4707, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "grad_norm": 653654.0625, | |
| "learning_rate": 3.1893004115226336e-05, | |
| "loss": 0.531, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.2822222222222222, | |
| "grad_norm": 614772.8125, | |
| "learning_rate": 3.185185185185185e-05, | |
| "loss": 0.4341, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.2844444444444445, | |
| "grad_norm": 867558.6875, | |
| "learning_rate": 3.181069958847737e-05, | |
| "loss": 0.5906, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.2866666666666666, | |
| "grad_norm": 567176.9375, | |
| "learning_rate": 3.176954732510288e-05, | |
| "loss": 0.5396, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.2888888888888888, | |
| "grad_norm": 438535.9375, | |
| "learning_rate": 3.172839506172839e-05, | |
| "loss": 0.5703, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.291111111111111, | |
| "grad_norm": 1499955.25, | |
| "learning_rate": 3.168724279835391e-05, | |
| "loss": 0.4399, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.2933333333333334, | |
| "grad_norm": 141409.5, | |
| "learning_rate": 3.164609053497943e-05, | |
| "loss": 0.6679, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.2955555555555556, | |
| "grad_norm": 398965.0625, | |
| "learning_rate": 3.160493827160494e-05, | |
| "loss": 0.4923, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.2977777777777777, | |
| "grad_norm": 2154293.5, | |
| "learning_rate": 3.1563786008230456e-05, | |
| "loss": 0.5707, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "grad_norm": 456048.875, | |
| "learning_rate": 3.1522633744855965e-05, | |
| "loss": 0.6075, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.3022222222222222, | |
| "grad_norm": 543668.875, | |
| "learning_rate": 3.148148148148148e-05, | |
| "loss": 0.5149, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.3044444444444445, | |
| "grad_norm": 539372.8125, | |
| "learning_rate": 3.1440329218106997e-05, | |
| "loss": 0.6545, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.3066666666666666, | |
| "grad_norm": 504076.53125, | |
| "learning_rate": 3.139917695473251e-05, | |
| "loss": 0.7466, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.3088888888888888, | |
| "grad_norm": 400669.0, | |
| "learning_rate": 3.135802469135803e-05, | |
| "loss": 0.6429, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.3111111111111111, | |
| "grad_norm": 614820.4375, | |
| "learning_rate": 3.131687242798354e-05, | |
| "loss": 0.4616, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.3133333333333335, | |
| "grad_norm": 187871.6875, | |
| "learning_rate": 3.127572016460906e-05, | |
| "loss": 0.5457, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.3155555555555556, | |
| "grad_norm": 322704.03125, | |
| "learning_rate": 3.123456790123457e-05, | |
| "loss": 0.492, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.3177777777777777, | |
| "grad_norm": 509237.46875, | |
| "learning_rate": 3.1193415637860085e-05, | |
| "loss": 0.5478, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "grad_norm": 953086.1875, | |
| "learning_rate": 3.1152263374485594e-05, | |
| "loss": 0.515, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.3222222222222222, | |
| "grad_norm": 607668.625, | |
| "learning_rate": 3.111111111111111e-05, | |
| "loss": 0.5707, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.3244444444444445, | |
| "grad_norm": 2008874.0, | |
| "learning_rate": 3.1069958847736626e-05, | |
| "loss": 0.6644, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.3266666666666667, | |
| "grad_norm": 439735.1875, | |
| "learning_rate": 3.102880658436214e-05, | |
| "loss": 0.499, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.3288888888888888, | |
| "grad_norm": 635145.625, | |
| "learning_rate": 3.098765432098766e-05, | |
| "loss": 0.6213, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.3311111111111111, | |
| "grad_norm": 1621658.25, | |
| "learning_rate": 3.0946502057613167e-05, | |
| "loss": 0.5868, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.3333333333333333, | |
| "grad_norm": 588194.4375, | |
| "learning_rate": 3.090534979423869e-05, | |
| "loss": 0.6255, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.3355555555555556, | |
| "grad_norm": 644249.375, | |
| "learning_rate": 3.08641975308642e-05, | |
| "loss": 0.5518, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.3377777777777777, | |
| "grad_norm": 165082.265625, | |
| "learning_rate": 3.0823045267489714e-05, | |
| "loss": 0.5313, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "grad_norm": 1069945.5, | |
| "learning_rate": 3.078189300411522e-05, | |
| "loss": 0.5828, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.3422222222222222, | |
| "grad_norm": 504896.5, | |
| "learning_rate": 3.074074074074074e-05, | |
| "loss": 0.4712, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.3444444444444446, | |
| "grad_norm": 1039296.625, | |
| "learning_rate": 3.069958847736626e-05, | |
| "loss": 0.5231, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.3466666666666667, | |
| "grad_norm": 1078974.625, | |
| "learning_rate": 3.065843621399177e-05, | |
| "loss": 0.5408, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.3488888888888888, | |
| "grad_norm": 266013.1875, | |
| "learning_rate": 3.061728395061729e-05, | |
| "loss": 0.5208, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.3511111111111112, | |
| "grad_norm": 245803.53125, | |
| "learning_rate": 3.0576131687242796e-05, | |
| "loss": 0.5754, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.3533333333333333, | |
| "grad_norm": 1278539.875, | |
| "learning_rate": 3.053497942386832e-05, | |
| "loss": 0.6466, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.3555555555555556, | |
| "grad_norm": 686136.625, | |
| "learning_rate": 3.0493827160493827e-05, | |
| "loss": 0.5953, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.3577777777777778, | |
| "grad_norm": 606737.0625, | |
| "learning_rate": 3.0452674897119343e-05, | |
| "loss": 0.4736, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.3599999999999999, | |
| "grad_norm": 1258263.25, | |
| "learning_rate": 3.041152263374486e-05, | |
| "loss": 0.5339, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.3622222222222222, | |
| "grad_norm": 1875020.75, | |
| "learning_rate": 3.037037037037037e-05, | |
| "loss": 0.5166, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.3644444444444446, | |
| "grad_norm": 249718.53125, | |
| "learning_rate": 3.0329218106995887e-05, | |
| "loss": 0.7374, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.3666666666666667, | |
| "grad_norm": 245895.125, | |
| "learning_rate": 3.02880658436214e-05, | |
| "loss": 0.5157, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.3688888888888888, | |
| "grad_norm": 945831.4375, | |
| "learning_rate": 3.0246913580246916e-05, | |
| "loss": 0.5125, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.3711111111111112, | |
| "grad_norm": 1526112.375, | |
| "learning_rate": 3.0205761316872428e-05, | |
| "loss": 0.5882, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.3733333333333333, | |
| "grad_norm": 492628.59375, | |
| "learning_rate": 3.0164609053497944e-05, | |
| "loss": 0.7338, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.3755555555555556, | |
| "grad_norm": 1887908.0, | |
| "learning_rate": 3.012345679012346e-05, | |
| "loss": 0.5683, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.3777777777777778, | |
| "grad_norm": 4119723.5, | |
| "learning_rate": 3.0082304526748972e-05, | |
| "loss": 0.4499, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "grad_norm": 196303.25, | |
| "learning_rate": 3.0041152263374488e-05, | |
| "loss": 0.5139, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.3822222222222222, | |
| "grad_norm": 402786.03125, | |
| "learning_rate": 3e-05, | |
| "loss": 0.5233, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.3844444444444444, | |
| "grad_norm": 1017483.0, | |
| "learning_rate": 2.9958847736625517e-05, | |
| "loss": 0.5011, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.3866666666666667, | |
| "grad_norm": 488110.4375, | |
| "learning_rate": 2.991769547325103e-05, | |
| "loss": 0.5281, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.3888888888888888, | |
| "grad_norm": 975504.0, | |
| "learning_rate": 2.9876543209876545e-05, | |
| "loss": 0.4671, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.3911111111111112, | |
| "grad_norm": 373597.0625, | |
| "learning_rate": 2.9835390946502057e-05, | |
| "loss": 0.4774, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.3933333333333333, | |
| "grad_norm": 388159.65625, | |
| "learning_rate": 2.9794238683127573e-05, | |
| "loss": 0.6625, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.3955555555555557, | |
| "grad_norm": 784697.4375, | |
| "learning_rate": 2.975308641975309e-05, | |
| "loss": 0.5849, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.3977777777777778, | |
| "grad_norm": 379587.84375, | |
| "learning_rate": 2.97119341563786e-05, | |
| "loss": 0.4771, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "grad_norm": 2868313.5, | |
| "learning_rate": 2.9670781893004117e-05, | |
| "loss": 0.5185, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.4022222222222223, | |
| "grad_norm": 1386558.5, | |
| "learning_rate": 2.962962962962963e-05, | |
| "loss": 0.5149, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.4044444444444444, | |
| "grad_norm": 368961.875, | |
| "learning_rate": 2.9588477366255146e-05, | |
| "loss": 0.5174, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.4066666666666667, | |
| "grad_norm": 503174.09375, | |
| "learning_rate": 2.9547325102880658e-05, | |
| "loss": 0.5064, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.4088888888888889, | |
| "grad_norm": 1131053.25, | |
| "learning_rate": 2.9506172839506174e-05, | |
| "loss": 0.7157, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.411111111111111, | |
| "grad_norm": 1685755.625, | |
| "learning_rate": 2.946502057613169e-05, | |
| "loss": 0.4297, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.4133333333333333, | |
| "grad_norm": 462870.75, | |
| "learning_rate": 2.9423868312757202e-05, | |
| "loss": 0.5453, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.4155555555555557, | |
| "grad_norm": 663891.375, | |
| "learning_rate": 2.9382716049382718e-05, | |
| "loss": 0.5529, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.4177777777777778, | |
| "grad_norm": 365754.65625, | |
| "learning_rate": 2.934156378600823e-05, | |
| "loss": 0.652, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "grad_norm": 218651.53125, | |
| "learning_rate": 2.9300411522633747e-05, | |
| "loss": 0.6578, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.4222222222222223, | |
| "grad_norm": 701713.8125, | |
| "learning_rate": 2.925925925925926e-05, | |
| "loss": 0.5603, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.4244444444444444, | |
| "grad_norm": 571417.1875, | |
| "learning_rate": 2.9218106995884775e-05, | |
| "loss": 0.4952, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.4266666666666667, | |
| "grad_norm": 3248552.75, | |
| "learning_rate": 2.917695473251029e-05, | |
| "loss": 0.6064, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.4288888888888889, | |
| "grad_norm": 284121.59375, | |
| "learning_rate": 2.9135802469135803e-05, | |
| "loss": 0.3896, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.431111111111111, | |
| "grad_norm": 1278210.125, | |
| "learning_rate": 2.909465020576132e-05, | |
| "loss": 0.5072, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.4333333333333333, | |
| "grad_norm": 436054.90625, | |
| "learning_rate": 2.905349794238683e-05, | |
| "loss": 0.6116, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.4355555555555555, | |
| "grad_norm": 1549142.5, | |
| "learning_rate": 2.9012345679012347e-05, | |
| "loss": 0.543, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.4377777777777778, | |
| "grad_norm": 1060773.625, | |
| "learning_rate": 2.897119341563786e-05, | |
| "loss": 0.6623, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "grad_norm": 2348947.5, | |
| "learning_rate": 2.8930041152263376e-05, | |
| "loss": 0.5165, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.4422222222222223, | |
| "grad_norm": 732758.875, | |
| "learning_rate": 2.8888888888888888e-05, | |
| "loss": 0.5677, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.4444444444444444, | |
| "grad_norm": 2372214.75, | |
| "learning_rate": 2.8847736625514404e-05, | |
| "loss": 0.6804, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.4466666666666668, | |
| "grad_norm": 340780.15625, | |
| "learning_rate": 2.880658436213992e-05, | |
| "loss": 0.4305, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.448888888888889, | |
| "grad_norm": 168099.875, | |
| "learning_rate": 2.8765432098765432e-05, | |
| "loss": 0.5265, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.451111111111111, | |
| "grad_norm": 1534431.0, | |
| "learning_rate": 2.8724279835390948e-05, | |
| "loss": 0.6421, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.4533333333333334, | |
| "grad_norm": 447703.84375, | |
| "learning_rate": 2.868312757201646e-05, | |
| "loss": 0.5125, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.4555555555555555, | |
| "grad_norm": 195473.515625, | |
| "learning_rate": 2.8641975308641977e-05, | |
| "loss": 0.4829, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.4577777777777778, | |
| "grad_norm": 266345.78125, | |
| "learning_rate": 2.860082304526749e-05, | |
| "loss": 0.4782, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "grad_norm": 300344.65625, | |
| "learning_rate": 2.8559670781893005e-05, | |
| "loss": 0.4272, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.462222222222222, | |
| "grad_norm": 420080.15625, | |
| "learning_rate": 2.851851851851852e-05, | |
| "loss": 0.4432, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.4644444444444444, | |
| "grad_norm": 769079.1875, | |
| "learning_rate": 2.8477366255144033e-05, | |
| "loss": 0.4976, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.4666666666666668, | |
| "grad_norm": 625666.25, | |
| "learning_rate": 2.843621399176955e-05, | |
| "loss": 0.6125, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.468888888888889, | |
| "grad_norm": 772571.125, | |
| "learning_rate": 2.839506172839506e-05, | |
| "loss": 0.6447, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.471111111111111, | |
| "grad_norm": 601890.8125, | |
| "learning_rate": 2.8353909465020577e-05, | |
| "loss": 0.6098, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.4733333333333334, | |
| "grad_norm": 1934153.875, | |
| "learning_rate": 2.831275720164609e-05, | |
| "loss": 0.4742, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.4755555555555555, | |
| "grad_norm": 423096.5625, | |
| "learning_rate": 2.8271604938271606e-05, | |
| "loss": 0.6821, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.4777777777777779, | |
| "grad_norm": 312746.1875, | |
| "learning_rate": 2.823045267489712e-05, | |
| "loss": 0.4722, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "grad_norm": 931891.6875, | |
| "learning_rate": 2.8189300411522634e-05, | |
| "loss": 0.4514, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.482222222222222, | |
| "grad_norm": 294852.0625, | |
| "learning_rate": 2.814814814814815e-05, | |
| "loss": 0.5212, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 1.4844444444444445, | |
| "grad_norm": 1102205.625, | |
| "learning_rate": 2.8106995884773662e-05, | |
| "loss": 0.5996, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 1.4866666666666668, | |
| "grad_norm": 339469.875, | |
| "learning_rate": 2.8065843621399178e-05, | |
| "loss": 0.5537, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 1.488888888888889, | |
| "grad_norm": 285161.625, | |
| "learning_rate": 2.802469135802469e-05, | |
| "loss": 0.5498, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.491111111111111, | |
| "grad_norm": 613179.125, | |
| "learning_rate": 2.7983539094650207e-05, | |
| "loss": 0.4538, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 1.4933333333333334, | |
| "grad_norm": 419371.625, | |
| "learning_rate": 2.7942386831275726e-05, | |
| "loss": 0.5355, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 1.4955555555555555, | |
| "grad_norm": 1493819.25, | |
| "learning_rate": 2.7901234567901235e-05, | |
| "loss": 0.6273, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 1.4977777777777779, | |
| "grad_norm": 749782.5, | |
| "learning_rate": 2.786008230452675e-05, | |
| "loss": 0.5511, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 417377.625, | |
| "learning_rate": 2.7818930041152263e-05, | |
| "loss": 0.496, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 1.5022222222222221, | |
| "grad_norm": 446522.75, | |
| "learning_rate": 2.777777777777778e-05, | |
| "loss": 0.4735, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 1.5044444444444445, | |
| "grad_norm": 1023378.1875, | |
| "learning_rate": 2.773662551440329e-05, | |
| "loss": 0.6545, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 1.5066666666666668, | |
| "grad_norm": 620501.0625, | |
| "learning_rate": 2.7695473251028807e-05, | |
| "loss": 0.5341, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 1.508888888888889, | |
| "grad_norm": 409442.4375, | |
| "learning_rate": 2.765432098765432e-05, | |
| "loss": 0.6423, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.511111111111111, | |
| "grad_norm": 1429912.75, | |
| "learning_rate": 2.7613168724279836e-05, | |
| "loss": 0.5688, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.5133333333333332, | |
| "grad_norm": 851064.125, | |
| "learning_rate": 2.757201646090535e-05, | |
| "loss": 0.6428, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 1.5155555555555555, | |
| "grad_norm": 222915.484375, | |
| "learning_rate": 2.7530864197530864e-05, | |
| "loss": 0.5159, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 1.517777777777778, | |
| "grad_norm": 327829.65625, | |
| "learning_rate": 2.748971193415638e-05, | |
| "loss": 0.4942, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "grad_norm": 694012.4375, | |
| "learning_rate": 2.7448559670781892e-05, | |
| "loss": 0.6507, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.5222222222222221, | |
| "grad_norm": 1516590.375, | |
| "learning_rate": 2.7407407407407408e-05, | |
| "loss": 0.5754, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 1.5244444444444445, | |
| "grad_norm": 960898.75, | |
| "learning_rate": 2.736625514403292e-05, | |
| "loss": 0.5405, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 1.5266666666666666, | |
| "grad_norm": 1006265.8125, | |
| "learning_rate": 2.7325102880658437e-05, | |
| "loss": 0.5338, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 1.528888888888889, | |
| "grad_norm": 329611.46875, | |
| "learning_rate": 2.7283950617283956e-05, | |
| "loss": 0.4846, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 1.531111111111111, | |
| "grad_norm": 314864.375, | |
| "learning_rate": 2.7242798353909465e-05, | |
| "loss": 0.4881, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 1.5333333333333332, | |
| "grad_norm": 555823.1875, | |
| "learning_rate": 2.720164609053498e-05, | |
| "loss": 0.5142, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.5355555555555556, | |
| "grad_norm": 975172.6875, | |
| "learning_rate": 2.7160493827160493e-05, | |
| "loss": 0.4287, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 1.537777777777778, | |
| "grad_norm": 698330.1875, | |
| "learning_rate": 2.711934156378601e-05, | |
| "loss": 0.5588, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "grad_norm": 911089.25, | |
| "learning_rate": 2.707818930041152e-05, | |
| "loss": 0.5083, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 1.5422222222222222, | |
| "grad_norm": 264078.0625, | |
| "learning_rate": 2.7037037037037037e-05, | |
| "loss": 0.5387, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 1.5444444444444443, | |
| "grad_norm": 852672.875, | |
| "learning_rate": 2.6995884773662557e-05, | |
| "loss": 0.5044, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 1.5466666666666666, | |
| "grad_norm": 455226.28125, | |
| "learning_rate": 2.6954732510288066e-05, | |
| "loss": 0.4886, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 1.548888888888889, | |
| "grad_norm": 522844.03125, | |
| "learning_rate": 2.6913580246913585e-05, | |
| "loss": 0.5363, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 1.551111111111111, | |
| "grad_norm": 129947.859375, | |
| "learning_rate": 2.6872427983539094e-05, | |
| "loss": 0.6613, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 1.5533333333333332, | |
| "grad_norm": 890144.8125, | |
| "learning_rate": 2.683127572016461e-05, | |
| "loss": 0.6588, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 1.5555555555555556, | |
| "grad_norm": 371911.78125, | |
| "learning_rate": 2.6790123456790122e-05, | |
| "loss": 0.4451, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.557777777777778, | |
| "grad_norm": 130950.0078125, | |
| "learning_rate": 2.6748971193415638e-05, | |
| "loss": 0.4966, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "grad_norm": 142102.953125, | |
| "learning_rate": 2.6707818930041158e-05, | |
| "loss": 0.4307, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 1.5622222222222222, | |
| "grad_norm": 359522.21875, | |
| "learning_rate": 2.6666666666666667e-05, | |
| "loss": 0.5764, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 1.5644444444444443, | |
| "grad_norm": 154281.8125, | |
| "learning_rate": 2.6625514403292186e-05, | |
| "loss": 0.4768, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 1.5666666666666667, | |
| "grad_norm": 806375.1875, | |
| "learning_rate": 2.6584362139917695e-05, | |
| "loss": 0.6178, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 1.568888888888889, | |
| "grad_norm": 648758.4375, | |
| "learning_rate": 2.654320987654321e-05, | |
| "loss": 0.6353, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 1.5711111111111111, | |
| "grad_norm": 582554.125, | |
| "learning_rate": 2.6502057613168723e-05, | |
| "loss": 0.5469, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 1.5733333333333333, | |
| "grad_norm": 1223310.5, | |
| "learning_rate": 2.646090534979424e-05, | |
| "loss": 0.6492, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 1.5755555555555556, | |
| "grad_norm": 991635.6875, | |
| "learning_rate": 2.641975308641975e-05, | |
| "loss": 0.468, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 1.5777777777777777, | |
| "grad_norm": 1111512.25, | |
| "learning_rate": 2.6378600823045267e-05, | |
| "loss": 0.5682, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "grad_norm": 179395.46875, | |
| "learning_rate": 2.6337448559670787e-05, | |
| "loss": 0.562, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 1.5822222222222222, | |
| "grad_norm": 380546.03125, | |
| "learning_rate": 2.6296296296296296e-05, | |
| "loss": 0.5148, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 1.5844444444444443, | |
| "grad_norm": 2229058.75, | |
| "learning_rate": 2.6255144032921815e-05, | |
| "loss": 0.6432, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 1.5866666666666667, | |
| "grad_norm": 765920.0625, | |
| "learning_rate": 2.6213991769547324e-05, | |
| "loss": 0.4749, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 1.588888888888889, | |
| "grad_norm": 585911.75, | |
| "learning_rate": 2.617283950617284e-05, | |
| "loss": 0.5296, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 1.5911111111111111, | |
| "grad_norm": 2377990.25, | |
| "learning_rate": 2.6131687242798352e-05, | |
| "loss": 0.5784, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 1.5933333333333333, | |
| "grad_norm": 1983418.125, | |
| "learning_rate": 2.6090534979423868e-05, | |
| "loss": 0.6722, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 1.5955555555555554, | |
| "grad_norm": 636937.1875, | |
| "learning_rate": 2.6049382716049388e-05, | |
| "loss": 0.5298, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 1.5977777777777777, | |
| "grad_norm": 819429.75, | |
| "learning_rate": 2.6008230452674897e-05, | |
| "loss": 0.5188, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "grad_norm": 346863.21875, | |
| "learning_rate": 2.5967078189300416e-05, | |
| "loss": 0.5294, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.6022222222222222, | |
| "grad_norm": 1708457.25, | |
| "learning_rate": 2.5925925925925925e-05, | |
| "loss": 0.4534, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 1.6044444444444443, | |
| "grad_norm": 264422.75, | |
| "learning_rate": 2.5884773662551444e-05, | |
| "loss": 0.5927, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 1.6066666666666667, | |
| "grad_norm": 782417.5625, | |
| "learning_rate": 2.5843621399176953e-05, | |
| "loss": 0.5819, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 1.608888888888889, | |
| "grad_norm": 541727.5, | |
| "learning_rate": 2.580246913580247e-05, | |
| "loss": 0.5413, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 1.6111111111111112, | |
| "grad_norm": 362651.84375, | |
| "learning_rate": 2.576131687242799e-05, | |
| "loss": 0.5311, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 1.6133333333333333, | |
| "grad_norm": 302592.6875, | |
| "learning_rate": 2.5720164609053497e-05, | |
| "loss": 0.6317, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 1.6155555555555554, | |
| "grad_norm": 4282987.0, | |
| "learning_rate": 2.5679012345679017e-05, | |
| "loss": 0.5404, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 1.6177777777777778, | |
| "grad_norm": 388880.84375, | |
| "learning_rate": 2.5637860082304526e-05, | |
| "loss": 0.5138, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "grad_norm": 915515.4375, | |
| "learning_rate": 2.5596707818930045e-05, | |
| "loss": 0.5676, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 1.6222222222222222, | |
| "grad_norm": 1237556.5, | |
| "learning_rate": 2.5555555555555554e-05, | |
| "loss": 0.5387, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.6244444444444444, | |
| "grad_norm": 648158.125, | |
| "learning_rate": 2.551440329218107e-05, | |
| "loss": 0.362, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 1.6266666666666667, | |
| "grad_norm": 4585280.0, | |
| "learning_rate": 2.5473251028806582e-05, | |
| "loss": 0.6261, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 1.628888888888889, | |
| "grad_norm": 812792.9375, | |
| "learning_rate": 2.5432098765432098e-05, | |
| "loss": 0.5417, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 1.6311111111111112, | |
| "grad_norm": 600218.625, | |
| "learning_rate": 2.5390946502057617e-05, | |
| "loss": 0.579, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 1.6333333333333333, | |
| "grad_norm": 125346.09375, | |
| "learning_rate": 2.5349794238683127e-05, | |
| "loss": 0.542, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 1.6355555555555554, | |
| "grad_norm": 688033.3125, | |
| "learning_rate": 2.5308641975308646e-05, | |
| "loss": 0.5469, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 1.6377777777777778, | |
| "grad_norm": 816828.375, | |
| "learning_rate": 2.5267489711934155e-05, | |
| "loss": 0.5175, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 1.6400000000000001, | |
| "grad_norm": 1660627.625, | |
| "learning_rate": 2.5226337448559674e-05, | |
| "loss": 0.5644, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 1.6422222222222222, | |
| "grad_norm": 727887.4375, | |
| "learning_rate": 2.5185185185185183e-05, | |
| "loss": 0.689, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 1.6444444444444444, | |
| "grad_norm": 637634.625, | |
| "learning_rate": 2.51440329218107e-05, | |
| "loss": 0.5897, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.6466666666666665, | |
| "grad_norm": 457325.75, | |
| "learning_rate": 2.510288065843622e-05, | |
| "loss": 0.6766, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 1.6488888888888888, | |
| "grad_norm": 250656.8125, | |
| "learning_rate": 2.5061728395061727e-05, | |
| "loss": 0.5797, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.6511111111111112, | |
| "grad_norm": 667376.6875, | |
| "learning_rate": 2.5020576131687247e-05, | |
| "loss": 0.4846, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 1.6533333333333333, | |
| "grad_norm": 203570.921875, | |
| "learning_rate": 2.497942386831276e-05, | |
| "loss": 0.3637, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 1.6555555555555554, | |
| "grad_norm": 757491.5625, | |
| "learning_rate": 2.4938271604938275e-05, | |
| "loss": 0.5933, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 1.6577777777777778, | |
| "grad_norm": 258822.21875, | |
| "learning_rate": 2.4897119341563787e-05, | |
| "loss": 0.4402, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 1.6600000000000001, | |
| "grad_norm": 404580.1875, | |
| "learning_rate": 2.4855967078189303e-05, | |
| "loss": 0.4108, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 1.6622222222222223, | |
| "grad_norm": 189135.4375, | |
| "learning_rate": 2.4814814814814816e-05, | |
| "loss": 0.481, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 1.6644444444444444, | |
| "grad_norm": 365586.9375, | |
| "learning_rate": 2.4773662551440328e-05, | |
| "loss": 0.5955, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 1.6666666666666665, | |
| "grad_norm": 637579.75, | |
| "learning_rate": 2.4732510288065844e-05, | |
| "loss": 0.6268, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.6688888888888889, | |
| "grad_norm": 234886.171875, | |
| "learning_rate": 2.4691358024691357e-05, | |
| "loss": 0.431, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 1.6711111111111112, | |
| "grad_norm": 641196.1875, | |
| "learning_rate": 2.4650205761316876e-05, | |
| "loss": 0.4341, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 1.6733333333333333, | |
| "grad_norm": 269056.125, | |
| "learning_rate": 2.4609053497942388e-05, | |
| "loss": 0.4744, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 1.6755555555555555, | |
| "grad_norm": 2218215.0, | |
| "learning_rate": 2.4567901234567904e-05, | |
| "loss": 0.604, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 1.6777777777777778, | |
| "grad_norm": 695057.75, | |
| "learning_rate": 2.4526748971193417e-05, | |
| "loss": 0.6412, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 1.6800000000000002, | |
| "grad_norm": 810964.0625, | |
| "learning_rate": 2.4485596707818932e-05, | |
| "loss": 0.5174, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 1.6822222222222223, | |
| "grad_norm": 1622732.875, | |
| "learning_rate": 2.4444444444444445e-05, | |
| "loss": 0.4582, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 1.6844444444444444, | |
| "grad_norm": 362912.90625, | |
| "learning_rate": 2.4403292181069957e-05, | |
| "loss": 0.583, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 1.6866666666666665, | |
| "grad_norm": 2453790.75, | |
| "learning_rate": 2.4362139917695477e-05, | |
| "loss": 0.5999, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 1.6888888888888889, | |
| "grad_norm": 334428.9375, | |
| "learning_rate": 2.432098765432099e-05, | |
| "loss": 0.5055, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 1.6911111111111112, | |
| "grad_norm": 993397.0, | |
| "learning_rate": 2.4279835390946505e-05, | |
| "loss": 0.6, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 1.6933333333333334, | |
| "grad_norm": 387189.09375, | |
| "learning_rate": 2.4238683127572017e-05, | |
| "loss": 0.6245, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 1.6955555555555555, | |
| "grad_norm": 1040618.1875, | |
| "learning_rate": 2.4197530864197533e-05, | |
| "loss": 0.4439, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 1.6977777777777778, | |
| "grad_norm": 855744.9375, | |
| "learning_rate": 2.4156378600823046e-05, | |
| "loss": 0.4944, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "grad_norm": 1253714.5, | |
| "learning_rate": 2.4115226337448558e-05, | |
| "loss": 0.7158, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 1.7022222222222223, | |
| "grad_norm": 692958.8125, | |
| "learning_rate": 2.4074074074074074e-05, | |
| "loss": 0.4465, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 1.7044444444444444, | |
| "grad_norm": 1175609.875, | |
| "learning_rate": 2.403292181069959e-05, | |
| "loss": 0.5385, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 1.7066666666666666, | |
| "grad_norm": 649715.375, | |
| "learning_rate": 2.3991769547325106e-05, | |
| "loss": 0.4884, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 1.708888888888889, | |
| "grad_norm": 309797.25, | |
| "learning_rate": 2.3950617283950618e-05, | |
| "loss": 0.4184, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 1.7111111111111112, | |
| "grad_norm": 302643.53125, | |
| "learning_rate": 2.3909465020576134e-05, | |
| "loss": 0.5094, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 1.7133333333333334, | |
| "grad_norm": 679658.9375, | |
| "learning_rate": 2.3868312757201647e-05, | |
| "loss": 0.4653, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 1.7155555555555555, | |
| "grad_norm": 383316.59375, | |
| "learning_rate": 2.3827160493827162e-05, | |
| "loss": 0.5941, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 1.7177777777777776, | |
| "grad_norm": 450449.125, | |
| "learning_rate": 2.3786008230452675e-05, | |
| "loss": 0.6031, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "grad_norm": 193325.40625, | |
| "learning_rate": 2.374485596707819e-05, | |
| "loss": 0.5443, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 1.7222222222222223, | |
| "grad_norm": 1077144.75, | |
| "learning_rate": 2.3703703703703707e-05, | |
| "loss": 0.5123, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 1.7244444444444444, | |
| "grad_norm": 449495.15625, | |
| "learning_rate": 2.366255144032922e-05, | |
| "loss": 0.5594, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 1.7266666666666666, | |
| "grad_norm": 319506.9375, | |
| "learning_rate": 2.3621399176954735e-05, | |
| "loss": 0.5426, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 1.728888888888889, | |
| "grad_norm": 1278679.125, | |
| "learning_rate": 2.3580246913580247e-05, | |
| "loss": 0.6726, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 1.7311111111111113, | |
| "grad_norm": 897134.25, | |
| "learning_rate": 2.3539094650205763e-05, | |
| "loss": 0.5594, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 1.7333333333333334, | |
| "grad_norm": 427793.71875, | |
| "learning_rate": 2.3497942386831276e-05, | |
| "loss": 0.5476, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 1.7355555555555555, | |
| "grad_norm": 359693.71875, | |
| "learning_rate": 2.345679012345679e-05, | |
| "loss": 0.4278, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 1.7377777777777776, | |
| "grad_norm": 678628.125, | |
| "learning_rate": 2.3415637860082307e-05, | |
| "loss": 0.5121, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "grad_norm": 823664.75, | |
| "learning_rate": 2.337448559670782e-05, | |
| "loss": 0.4467, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 1.7422222222222223, | |
| "grad_norm": 556514.875, | |
| "learning_rate": 2.3333333333333336e-05, | |
| "loss": 0.6415, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 1.7444444444444445, | |
| "grad_norm": 844012.6875, | |
| "learning_rate": 2.3292181069958848e-05, | |
| "loss": 0.5389, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 1.7466666666666666, | |
| "grad_norm": 731721.375, | |
| "learning_rate": 2.3251028806584364e-05, | |
| "loss": 0.4813, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 1.748888888888889, | |
| "grad_norm": 657326.875, | |
| "learning_rate": 2.3209876543209877e-05, | |
| "loss": 0.6037, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 1.751111111111111, | |
| "grad_norm": 1010242.3125, | |
| "learning_rate": 2.3168724279835392e-05, | |
| "loss": 0.5776, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 1.7533333333333334, | |
| "grad_norm": 1569845.625, | |
| "learning_rate": 2.312757201646091e-05, | |
| "loss": 0.5562, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 1.7555555555555555, | |
| "grad_norm": 446353.375, | |
| "learning_rate": 2.308641975308642e-05, | |
| "loss": 0.6496, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 1.7577777777777777, | |
| "grad_norm": 860934.6875, | |
| "learning_rate": 2.3045267489711937e-05, | |
| "loss": 0.5168, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "grad_norm": 2472300.25, | |
| "learning_rate": 2.300411522633745e-05, | |
| "loss": 0.4787, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 1.7622222222222224, | |
| "grad_norm": 1831251.25, | |
| "learning_rate": 2.2962962962962965e-05, | |
| "loss": 0.5387, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 1.7644444444444445, | |
| "grad_norm": 1542738.5, | |
| "learning_rate": 2.2921810699588477e-05, | |
| "loss": 0.5983, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 1.7666666666666666, | |
| "grad_norm": 1019469.1875, | |
| "learning_rate": 2.2880658436213993e-05, | |
| "loss": 0.6107, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 1.7688888888888887, | |
| "grad_norm": 392084.78125, | |
| "learning_rate": 2.2839506172839506e-05, | |
| "loss": 0.4184, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 1.771111111111111, | |
| "grad_norm": 1160610.25, | |
| "learning_rate": 2.279835390946502e-05, | |
| "loss": 0.5625, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 1.7733333333333334, | |
| "grad_norm": 229940.953125, | |
| "learning_rate": 2.2757201646090537e-05, | |
| "loss": 0.6152, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 1.7755555555555556, | |
| "grad_norm": 356666.3125, | |
| "learning_rate": 2.271604938271605e-05, | |
| "loss": 0.4933, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 1.7777777777777777, | |
| "grad_norm": 130159.8125, | |
| "learning_rate": 2.2674897119341566e-05, | |
| "loss": 0.5619, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "grad_norm": 148995.453125, | |
| "learning_rate": 2.2633744855967078e-05, | |
| "loss": 0.4679, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 1.7822222222222224, | |
| "grad_norm": 690270.125, | |
| "learning_rate": 2.2592592592592594e-05, | |
| "loss": 0.4568, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 1.7844444444444445, | |
| "grad_norm": 1709161.25, | |
| "learning_rate": 2.2551440329218107e-05, | |
| "loss": 0.4541, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 1.7866666666666666, | |
| "grad_norm": 1714657.25, | |
| "learning_rate": 2.2510288065843622e-05, | |
| "loss": 0.4326, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 1.7888888888888888, | |
| "grad_norm": 1946265.25, | |
| "learning_rate": 2.246913580246914e-05, | |
| "loss": 0.5646, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 1.791111111111111, | |
| "grad_norm": 872982.875, | |
| "learning_rate": 2.242798353909465e-05, | |
| "loss": 0.4512, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 1.7933333333333334, | |
| "grad_norm": 929096.4375, | |
| "learning_rate": 2.2386831275720167e-05, | |
| "loss": 0.4812, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 1.7955555555555556, | |
| "grad_norm": 431499.1875, | |
| "learning_rate": 2.234567901234568e-05, | |
| "loss": 0.5798, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 1.7977777777777777, | |
| "grad_norm": 837512.3125, | |
| "learning_rate": 2.2304526748971195e-05, | |
| "loss": 0.5095, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "grad_norm": 446406.6875, | |
| "learning_rate": 2.2263374485596707e-05, | |
| "loss": 0.5401, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 1.8022222222222222, | |
| "grad_norm": 358855.78125, | |
| "learning_rate": 2.2222222222222223e-05, | |
| "loss": 0.5959, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 1.8044444444444445, | |
| "grad_norm": 326300.875, | |
| "learning_rate": 2.218106995884774e-05, | |
| "loss": 0.4101, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 1.8066666666666666, | |
| "grad_norm": 910762.8125, | |
| "learning_rate": 2.213991769547325e-05, | |
| "loss": 0.5932, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 1.8088888888888888, | |
| "grad_norm": 1639550.125, | |
| "learning_rate": 2.2098765432098767e-05, | |
| "loss": 0.437, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 1.8111111111111111, | |
| "grad_norm": 443086.21875, | |
| "learning_rate": 2.205761316872428e-05, | |
| "loss": 0.5197, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 1.8133333333333335, | |
| "grad_norm": 481764.21875, | |
| "learning_rate": 2.2016460905349796e-05, | |
| "loss": 0.4687, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 1.8155555555555556, | |
| "grad_norm": 395750.53125, | |
| "learning_rate": 2.1975308641975308e-05, | |
| "loss": 0.5638, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 1.8177777777777777, | |
| "grad_norm": 427531.65625, | |
| "learning_rate": 2.1934156378600824e-05, | |
| "loss": 0.5704, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 1.8199999999999998, | |
| "grad_norm": 1041394.3125, | |
| "learning_rate": 2.189300411522634e-05, | |
| "loss": 0.5553, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 1.8222222222222222, | |
| "grad_norm": 368493.375, | |
| "learning_rate": 2.1851851851851852e-05, | |
| "loss": 0.4636, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 1.8244444444444445, | |
| "grad_norm": 911481.6875, | |
| "learning_rate": 2.1810699588477368e-05, | |
| "loss": 0.4623, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 1.8266666666666667, | |
| "grad_norm": 772321.5, | |
| "learning_rate": 2.176954732510288e-05, | |
| "loss": 0.619, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 1.8288888888888888, | |
| "grad_norm": 2411277.0, | |
| "learning_rate": 2.1728395061728397e-05, | |
| "loss": 0.4113, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 1.8311111111111111, | |
| "grad_norm": 282978.75, | |
| "learning_rate": 2.168724279835391e-05, | |
| "loss": 0.5235, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 1.8333333333333335, | |
| "grad_norm": 369288.125, | |
| "learning_rate": 2.1646090534979425e-05, | |
| "loss": 0.4668, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 1.8355555555555556, | |
| "grad_norm": 427154.0625, | |
| "learning_rate": 2.1604938271604937e-05, | |
| "loss": 0.5174, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 1.8377777777777777, | |
| "grad_norm": 590274.0625, | |
| "learning_rate": 2.1563786008230453e-05, | |
| "loss": 0.6154, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 1.8399999999999999, | |
| "grad_norm": 479509.1875, | |
| "learning_rate": 2.152263374485597e-05, | |
| "loss": 0.4884, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 1.8422222222222222, | |
| "grad_norm": 627632.5625, | |
| "learning_rate": 2.148148148148148e-05, | |
| "loss": 0.5925, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 1.8444444444444446, | |
| "grad_norm": 126343.203125, | |
| "learning_rate": 2.1440329218106997e-05, | |
| "loss": 0.4947, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 1.8466666666666667, | |
| "grad_norm": 662519.25, | |
| "learning_rate": 2.139917695473251e-05, | |
| "loss": 0.4442, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 1.8488888888888888, | |
| "grad_norm": 1551904.25, | |
| "learning_rate": 2.1358024691358026e-05, | |
| "loss": 0.4669, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 1.8511111111111112, | |
| "grad_norm": 495174.375, | |
| "learning_rate": 2.1316872427983538e-05, | |
| "loss": 0.4312, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 1.8533333333333335, | |
| "grad_norm": 500009.3125, | |
| "learning_rate": 2.1275720164609054e-05, | |
| "loss": 0.4621, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 1.8555555555555556, | |
| "grad_norm": 1010434.4375, | |
| "learning_rate": 2.123456790123457e-05, | |
| "loss": 0.5229, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 1.8577777777777778, | |
| "grad_norm": 265641.6875, | |
| "learning_rate": 2.1193415637860082e-05, | |
| "loss": 0.5296, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 1.8599999999999999, | |
| "grad_norm": 795082.4375, | |
| "learning_rate": 2.1152263374485598e-05, | |
| "loss": 0.5857, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 1.8622222222222222, | |
| "grad_norm": 838311.375, | |
| "learning_rate": 2.111111111111111e-05, | |
| "loss": 0.4881, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 1.8644444444444446, | |
| "grad_norm": 978832.3125, | |
| "learning_rate": 2.1069958847736627e-05, | |
| "loss": 0.5223, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 1.8666666666666667, | |
| "grad_norm": 1003945.3125, | |
| "learning_rate": 2.102880658436214e-05, | |
| "loss": 0.4662, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 1.8688888888888888, | |
| "grad_norm": 3484293.5, | |
| "learning_rate": 2.0987654320987655e-05, | |
| "loss": 0.7073, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 1.871111111111111, | |
| "grad_norm": 544340.5, | |
| "learning_rate": 2.094650205761317e-05, | |
| "loss": 0.7396, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 1.8733333333333333, | |
| "grad_norm": 954412.9375, | |
| "learning_rate": 2.0905349794238687e-05, | |
| "loss": 0.5633, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 1.8755555555555556, | |
| "grad_norm": 1018313.875, | |
| "learning_rate": 2.08641975308642e-05, | |
| "loss": 0.5066, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 1.8777777777777778, | |
| "grad_norm": 2038616.875, | |
| "learning_rate": 2.082304526748971e-05, | |
| "loss": 0.7643, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "grad_norm": 788535.3125, | |
| "learning_rate": 2.0781893004115227e-05, | |
| "loss": 0.5479, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 1.8822222222222222, | |
| "grad_norm": 1077129.875, | |
| "learning_rate": 2.074074074074074e-05, | |
| "loss": 0.5288, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 1.8844444444444446, | |
| "grad_norm": 944732.9375, | |
| "learning_rate": 2.0699588477366256e-05, | |
| "loss": 0.5459, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 1.8866666666666667, | |
| "grad_norm": 692355.9375, | |
| "learning_rate": 2.0658436213991768e-05, | |
| "loss": 0.5624, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 1.8888888888888888, | |
| "grad_norm": 788133.9375, | |
| "learning_rate": 2.0617283950617287e-05, | |
| "loss": 0.5595, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 1.891111111111111, | |
| "grad_norm": 1503811.75, | |
| "learning_rate": 2.05761316872428e-05, | |
| "loss": 0.4503, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 1.8933333333333333, | |
| "grad_norm": 608034.5625, | |
| "learning_rate": 2.0534979423868312e-05, | |
| "loss": 0.597, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 1.8955555555555557, | |
| "grad_norm": 432831.4375, | |
| "learning_rate": 2.0493827160493828e-05, | |
| "loss": 0.5731, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 1.8977777777777778, | |
| "grad_norm": 2556177.75, | |
| "learning_rate": 2.045267489711934e-05, | |
| "loss": 0.455, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "grad_norm": 818600.25, | |
| "learning_rate": 2.0411522633744857e-05, | |
| "loss": 0.5477, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 1.9022222222222223, | |
| "grad_norm": 702948.9375, | |
| "learning_rate": 2.037037037037037e-05, | |
| "loss": 0.5564, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 1.9044444444444446, | |
| "grad_norm": 352416.96875, | |
| "learning_rate": 2.032921810699589e-05, | |
| "loss": 0.479, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 1.9066666666666667, | |
| "grad_norm": 641082.25, | |
| "learning_rate": 2.02880658436214e-05, | |
| "loss": 0.5326, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 1.9088888888888889, | |
| "grad_norm": 1330086.625, | |
| "learning_rate": 2.0246913580246917e-05, | |
| "loss": 0.5112, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 1.911111111111111, | |
| "grad_norm": 402839.9375, | |
| "learning_rate": 2.020576131687243e-05, | |
| "loss": 0.4653, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 1.9133333333333333, | |
| "grad_norm": 1415979.0, | |
| "learning_rate": 2.016460905349794e-05, | |
| "loss": 0.5702, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 1.9155555555555557, | |
| "grad_norm": 582900.9375, | |
| "learning_rate": 2.0123456790123457e-05, | |
| "loss": 0.5496, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 1.9177777777777778, | |
| "grad_norm": 1254026.625, | |
| "learning_rate": 2.008230452674897e-05, | |
| "loss": 0.5884, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "grad_norm": 681267.3125, | |
| "learning_rate": 2.0041152263374486e-05, | |
| "loss": 0.5028, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 1.9222222222222223, | |
| "grad_norm": 639010.25, | |
| "learning_rate": 2e-05, | |
| "loss": 0.5437, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 1.9244444444444444, | |
| "grad_norm": 750335.0, | |
| "learning_rate": 1.9958847736625517e-05, | |
| "loss": 0.5413, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 1.9266666666666667, | |
| "grad_norm": 395516.53125, | |
| "learning_rate": 1.991769547325103e-05, | |
| "loss": 0.453, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 1.9288888888888889, | |
| "grad_norm": 496803.71875, | |
| "learning_rate": 1.9876543209876546e-05, | |
| "loss": 0.487, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 1.931111111111111, | |
| "grad_norm": 1806078.25, | |
| "learning_rate": 1.9835390946502058e-05, | |
| "loss": 0.5195, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 1.9333333333333333, | |
| "grad_norm": 424641.3125, | |
| "learning_rate": 1.979423868312757e-05, | |
| "loss": 0.4678, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 1.9355555555555557, | |
| "grad_norm": 1400120.0, | |
| "learning_rate": 1.9753086419753087e-05, | |
| "loss": 0.4432, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 1.9377777777777778, | |
| "grad_norm": 887819.125, | |
| "learning_rate": 1.9711934156378602e-05, | |
| "loss": 0.5575, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "grad_norm": 216944.609375, | |
| "learning_rate": 1.967078189300412e-05, | |
| "loss": 0.5241, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 1.942222222222222, | |
| "grad_norm": 963737.4375, | |
| "learning_rate": 1.962962962962963e-05, | |
| "loss": 0.5449, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 1.9444444444444444, | |
| "grad_norm": 656178.6875, | |
| "learning_rate": 1.9588477366255147e-05, | |
| "loss": 0.5254, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 1.9466666666666668, | |
| "grad_norm": 601821.625, | |
| "learning_rate": 1.954732510288066e-05, | |
| "loss": 0.4921, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 1.948888888888889, | |
| "grad_norm": 2348228.75, | |
| "learning_rate": 1.950617283950617e-05, | |
| "loss": 0.4908, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 1.951111111111111, | |
| "grad_norm": 777158.4375, | |
| "learning_rate": 1.9465020576131687e-05, | |
| "loss": 0.4783, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 1.9533333333333334, | |
| "grad_norm": 565114.375, | |
| "learning_rate": 1.94238683127572e-05, | |
| "loss": 0.6376, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 1.9555555555555557, | |
| "grad_norm": 549776.4375, | |
| "learning_rate": 1.938271604938272e-05, | |
| "loss": 0.5003, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 1.9577777777777778, | |
| "grad_norm": 470974.875, | |
| "learning_rate": 1.934156378600823e-05, | |
| "loss": 0.5577, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "grad_norm": 988247.875, | |
| "learning_rate": 1.9300411522633747e-05, | |
| "loss": 0.5346, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 1.962222222222222, | |
| "grad_norm": 555580.3125, | |
| "learning_rate": 1.925925925925926e-05, | |
| "loss": 0.5311, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 1.9644444444444444, | |
| "grad_norm": 371100.03125, | |
| "learning_rate": 1.9218106995884776e-05, | |
| "loss": 0.5582, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 1.9666666666666668, | |
| "grad_norm": 426462.53125, | |
| "learning_rate": 1.9176954732510288e-05, | |
| "loss": 0.5114, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 1.968888888888889, | |
| "grad_norm": 560246.5625, | |
| "learning_rate": 1.91358024691358e-05, | |
| "loss": 0.5406, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 1.971111111111111, | |
| "grad_norm": 676496.4375, | |
| "learning_rate": 1.909465020576132e-05, | |
| "loss": 0.636, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 1.9733333333333334, | |
| "grad_norm": 942200.4375, | |
| "learning_rate": 1.9053497942386832e-05, | |
| "loss": 0.6124, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 1.9755555555555555, | |
| "grad_norm": 986006.3125, | |
| "learning_rate": 1.901234567901235e-05, | |
| "loss": 0.6272, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 1.9777777777777779, | |
| "grad_norm": 415790.6875, | |
| "learning_rate": 1.897119341563786e-05, | |
| "loss": 0.5431, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "grad_norm": 415508.59375, | |
| "learning_rate": 1.8930041152263377e-05, | |
| "loss": 0.5771, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 1.982222222222222, | |
| "grad_norm": 364349.125, | |
| "learning_rate": 1.888888888888889e-05, | |
| "loss": 0.4451, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 1.9844444444444445, | |
| "grad_norm": 357926.125, | |
| "learning_rate": 1.8847736625514405e-05, | |
| "loss": 0.5708, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 1.9866666666666668, | |
| "grad_norm": 648236.625, | |
| "learning_rate": 1.8806584362139917e-05, | |
| "loss": 0.6643, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 1.988888888888889, | |
| "grad_norm": 442882.90625, | |
| "learning_rate": 1.8765432098765433e-05, | |
| "loss": 0.4524, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 1.991111111111111, | |
| "grad_norm": 661978.375, | |
| "learning_rate": 1.872427983539095e-05, | |
| "loss": 0.5812, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 1.9933333333333332, | |
| "grad_norm": 1080674.0, | |
| "learning_rate": 1.868312757201646e-05, | |
| "loss": 0.6168, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 1.9955555555555555, | |
| "grad_norm": 2142185.5, | |
| "learning_rate": 1.8641975308641977e-05, | |
| "loss": 0.4752, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 1.9977777777777779, | |
| "grad_norm": 487222.90625, | |
| "learning_rate": 1.860082304526749e-05, | |
| "loss": 0.6078, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 399853.9375, | |
| "learning_rate": 1.8559670781893006e-05, | |
| "loss": 0.4525, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 2.002222222222222, | |
| "grad_norm": 442879.53125, | |
| "learning_rate": 1.8518518518518518e-05, | |
| "loss": 0.48, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 2.0044444444444443, | |
| "grad_norm": 463064.875, | |
| "learning_rate": 1.8477366255144034e-05, | |
| "loss": 0.5115, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 2.006666666666667, | |
| "grad_norm": 452375.40625, | |
| "learning_rate": 1.843621399176955e-05, | |
| "loss": 0.6397, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 2.008888888888889, | |
| "grad_norm": 694365.6875, | |
| "learning_rate": 1.8395061728395062e-05, | |
| "loss": 0.5129, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 2.011111111111111, | |
| "grad_norm": 380162.875, | |
| "learning_rate": 1.835390946502058e-05, | |
| "loss": 0.486, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 2.013333333333333, | |
| "grad_norm": 563086.0625, | |
| "learning_rate": 1.831275720164609e-05, | |
| "loss": 0.5581, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 2.0155555555555558, | |
| "grad_norm": 344576.40625, | |
| "learning_rate": 1.8271604938271607e-05, | |
| "loss": 0.57, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 2.017777777777778, | |
| "grad_norm": 398236.9375, | |
| "learning_rate": 1.823045267489712e-05, | |
| "loss": 0.5709, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "grad_norm": 814398.9375, | |
| "learning_rate": 1.8189300411522635e-05, | |
| "loss": 0.4834, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 2.022222222222222, | |
| "grad_norm": 492153.21875, | |
| "learning_rate": 1.814814814814815e-05, | |
| "loss": 0.7421, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 2.0244444444444443, | |
| "grad_norm": 164438.71875, | |
| "learning_rate": 1.8106995884773663e-05, | |
| "loss": 0.4633, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 2.026666666666667, | |
| "grad_norm": 460060.9375, | |
| "learning_rate": 1.806584362139918e-05, | |
| "loss": 0.4765, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 2.028888888888889, | |
| "grad_norm": 1290432.625, | |
| "learning_rate": 1.802469135802469e-05, | |
| "loss": 0.6418, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 2.031111111111111, | |
| "grad_norm": 704266.6875, | |
| "learning_rate": 1.7983539094650207e-05, | |
| "loss": 0.4835, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 2.033333333333333, | |
| "grad_norm": 505275.875, | |
| "learning_rate": 1.794238683127572e-05, | |
| "loss": 0.5829, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 2.0355555555555553, | |
| "grad_norm": 1274834.25, | |
| "learning_rate": 1.7901234567901236e-05, | |
| "loss": 0.5759, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 2.037777777777778, | |
| "grad_norm": 615392.875, | |
| "learning_rate": 1.7860082304526748e-05, | |
| "loss": 0.5188, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "grad_norm": 198756.796875, | |
| "learning_rate": 1.7818930041152264e-05, | |
| "loss": 0.5418, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 2.042222222222222, | |
| "grad_norm": 687675.1875, | |
| "learning_rate": 1.777777777777778e-05, | |
| "loss": 0.6929, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 2.0444444444444443, | |
| "grad_norm": 552092.4375, | |
| "learning_rate": 1.7736625514403292e-05, | |
| "loss": 0.4773, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 2.046666666666667, | |
| "grad_norm": 497010.9375, | |
| "learning_rate": 1.769547325102881e-05, | |
| "loss": 0.4841, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 2.048888888888889, | |
| "grad_norm": 435989.84375, | |
| "learning_rate": 1.765432098765432e-05, | |
| "loss": 0.4522, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 2.051111111111111, | |
| "grad_norm": 869393.125, | |
| "learning_rate": 1.7613168724279837e-05, | |
| "loss": 0.6985, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 2.0533333333333332, | |
| "grad_norm": 385168.8125, | |
| "learning_rate": 1.757201646090535e-05, | |
| "loss": 0.5334, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 2.0555555555555554, | |
| "grad_norm": 831693.0, | |
| "learning_rate": 1.7530864197530865e-05, | |
| "loss": 0.4533, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 2.057777777777778, | |
| "grad_norm": 1018997.9375, | |
| "learning_rate": 1.748971193415638e-05, | |
| "loss": 0.5729, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "grad_norm": 691982.25, | |
| "learning_rate": 1.7448559670781893e-05, | |
| "loss": 0.5815, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 2.062222222222222, | |
| "grad_norm": 280133.25, | |
| "learning_rate": 1.740740740740741e-05, | |
| "loss": 0.4563, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 2.0644444444444443, | |
| "grad_norm": 340764.8125, | |
| "learning_rate": 1.736625514403292e-05, | |
| "loss": 0.6178, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 2.066666666666667, | |
| "grad_norm": 629493.5, | |
| "learning_rate": 1.7325102880658437e-05, | |
| "loss": 0.6427, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 2.068888888888889, | |
| "grad_norm": 781840.5, | |
| "learning_rate": 1.728395061728395e-05, | |
| "loss": 0.7296, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 2.071111111111111, | |
| "grad_norm": 267660.0625, | |
| "learning_rate": 1.7242798353909466e-05, | |
| "loss": 0.6227, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 2.0733333333333333, | |
| "grad_norm": 436358.3125, | |
| "learning_rate": 1.720164609053498e-05, | |
| "loss": 0.5042, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 2.0755555555555554, | |
| "grad_norm": 1105647.875, | |
| "learning_rate": 1.7160493827160494e-05, | |
| "loss": 0.5156, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 2.077777777777778, | |
| "grad_norm": 451249.875, | |
| "learning_rate": 1.711934156378601e-05, | |
| "loss": 0.6336, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "grad_norm": 826279.0625, | |
| "learning_rate": 1.7078189300411522e-05, | |
| "loss": 0.462, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 2.082222222222222, | |
| "grad_norm": 433239.28125, | |
| "learning_rate": 1.7037037037037038e-05, | |
| "loss": 0.5149, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 2.0844444444444443, | |
| "grad_norm": 610665.125, | |
| "learning_rate": 1.699588477366255e-05, | |
| "loss": 0.6512, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 2.086666666666667, | |
| "grad_norm": 784074.75, | |
| "learning_rate": 1.6954732510288067e-05, | |
| "loss": 0.5048, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 2.088888888888889, | |
| "grad_norm": 440249.28125, | |
| "learning_rate": 1.6913580246913582e-05, | |
| "loss": 0.5385, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 2.091111111111111, | |
| "grad_norm": 248265.953125, | |
| "learning_rate": 1.6872427983539095e-05, | |
| "loss": 0.4792, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 2.0933333333333333, | |
| "grad_norm": 4933805.5, | |
| "learning_rate": 1.683127572016461e-05, | |
| "loss": 0.5055, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 2.0955555555555554, | |
| "grad_norm": 368027.625, | |
| "learning_rate": 1.6790123456790123e-05, | |
| "loss": 0.4709, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 2.097777777777778, | |
| "grad_norm": 792823.0625, | |
| "learning_rate": 1.674897119341564e-05, | |
| "loss": 0.6333, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "grad_norm": 2230348.75, | |
| "learning_rate": 1.670781893004115e-05, | |
| "loss": 0.4773, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 2.102222222222222, | |
| "grad_norm": 1184062.25, | |
| "learning_rate": 1.6666666666666667e-05, | |
| "loss": 0.6342, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 2.1044444444444443, | |
| "grad_norm": 1087129.75, | |
| "learning_rate": 1.662551440329218e-05, | |
| "loss": 0.479, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 2.1066666666666665, | |
| "grad_norm": 400438.1875, | |
| "learning_rate": 1.6584362139917696e-05, | |
| "loss": 0.7179, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 2.108888888888889, | |
| "grad_norm": 1183529.5, | |
| "learning_rate": 1.654320987654321e-05, | |
| "loss": 0.5167, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 2.111111111111111, | |
| "grad_norm": 721013.0, | |
| "learning_rate": 1.6502057613168724e-05, | |
| "loss": 0.3956, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 2.1133333333333333, | |
| "grad_norm": 785808.9375, | |
| "learning_rate": 1.646090534979424e-05, | |
| "loss": 0.4442, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 2.1155555555555554, | |
| "grad_norm": 224943.34375, | |
| "learning_rate": 1.6419753086419752e-05, | |
| "loss": 0.5059, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 2.117777777777778, | |
| "grad_norm": 646250.3125, | |
| "learning_rate": 1.6378600823045268e-05, | |
| "loss": 0.6695, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "grad_norm": 1041619.8125, | |
| "learning_rate": 1.633744855967078e-05, | |
| "loss": 0.5669, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 2.1222222222222222, | |
| "grad_norm": 610161.875, | |
| "learning_rate": 1.62962962962963e-05, | |
| "loss": 0.679, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 2.1244444444444444, | |
| "grad_norm": 175245.484375, | |
| "learning_rate": 1.6255144032921812e-05, | |
| "loss": 0.6749, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 2.1266666666666665, | |
| "grad_norm": 616233.6875, | |
| "learning_rate": 1.6213991769547325e-05, | |
| "loss": 0.5229, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 2.128888888888889, | |
| "grad_norm": 449539.5, | |
| "learning_rate": 1.617283950617284e-05, | |
| "loss": 0.6578, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 2.131111111111111, | |
| "grad_norm": 789648.6875, | |
| "learning_rate": 1.6131687242798353e-05, | |
| "loss": 0.4188, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 2.1333333333333333, | |
| "grad_norm": 2248975.5, | |
| "learning_rate": 1.609053497942387e-05, | |
| "loss": 0.6399, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 2.1355555555555554, | |
| "grad_norm": 468038.8125, | |
| "learning_rate": 1.604938271604938e-05, | |
| "loss": 0.4748, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 2.137777777777778, | |
| "grad_norm": 2706175.25, | |
| "learning_rate": 1.6008230452674897e-05, | |
| "loss": 0.5013, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "grad_norm": 545295.0625, | |
| "learning_rate": 1.5967078189300413e-05, | |
| "loss": 0.5944, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 2.1422222222222222, | |
| "grad_norm": 466568.6875, | |
| "learning_rate": 1.5925925925925926e-05, | |
| "loss": 0.5908, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 2.1444444444444444, | |
| "grad_norm": 237403.34375, | |
| "learning_rate": 1.588477366255144e-05, | |
| "loss": 0.6484, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 2.1466666666666665, | |
| "grad_norm": 637861.4375, | |
| "learning_rate": 1.5843621399176954e-05, | |
| "loss": 0.4325, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 2.148888888888889, | |
| "grad_norm": 232760.671875, | |
| "learning_rate": 1.580246913580247e-05, | |
| "loss": 0.4771, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 2.151111111111111, | |
| "grad_norm": 811513.5625, | |
| "learning_rate": 1.5761316872427982e-05, | |
| "loss": 0.552, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 2.1533333333333333, | |
| "grad_norm": 318511.15625, | |
| "learning_rate": 1.5720164609053498e-05, | |
| "loss": 0.5806, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 2.1555555555555554, | |
| "grad_norm": 183807.328125, | |
| "learning_rate": 1.5679012345679014e-05, | |
| "loss": 0.5115, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 2.1577777777777776, | |
| "grad_norm": 785010.8125, | |
| "learning_rate": 1.563786008230453e-05, | |
| "loss": 0.6294, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "grad_norm": 3169989.25, | |
| "learning_rate": 1.5596707818930042e-05, | |
| "loss": 0.5082, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 2.1622222222222223, | |
| "grad_norm": 652374.625, | |
| "learning_rate": 1.5555555555555555e-05, | |
| "loss": 0.6602, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 2.1644444444444444, | |
| "grad_norm": 672441.8125, | |
| "learning_rate": 1.551440329218107e-05, | |
| "loss": 0.4875, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 2.1666666666666665, | |
| "grad_norm": 424183.3125, | |
| "learning_rate": 1.5473251028806583e-05, | |
| "loss": 0.5186, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 2.168888888888889, | |
| "grad_norm": 660900.0625, | |
| "learning_rate": 1.54320987654321e-05, | |
| "loss": 0.5495, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 2.171111111111111, | |
| "grad_norm": 607010.9375, | |
| "learning_rate": 1.539094650205761e-05, | |
| "loss": 0.4558, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 2.1733333333333333, | |
| "grad_norm": 542253.1875, | |
| "learning_rate": 1.534979423868313e-05, | |
| "loss": 0.5178, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 2.1755555555555555, | |
| "grad_norm": 733078.125, | |
| "learning_rate": 1.5308641975308643e-05, | |
| "loss": 0.4317, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 2.1777777777777776, | |
| "grad_norm": 745732.3125, | |
| "learning_rate": 1.526748971193416e-05, | |
| "loss": 0.5786, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "grad_norm": 173377.171875, | |
| "learning_rate": 1.5226337448559672e-05, | |
| "loss": 0.4642, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 2.1822222222222223, | |
| "grad_norm": 439588.0, | |
| "learning_rate": 1.5185185185185186e-05, | |
| "loss": 0.6901, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 2.1844444444444444, | |
| "grad_norm": 1793941.5, | |
| "learning_rate": 1.51440329218107e-05, | |
| "loss": 0.5587, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 2.1866666666666665, | |
| "grad_norm": 197852.171875, | |
| "learning_rate": 1.5102880658436214e-05, | |
| "loss": 0.5243, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 2.188888888888889, | |
| "grad_norm": 781908.25, | |
| "learning_rate": 1.506172839506173e-05, | |
| "loss": 0.6291, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 2.1911111111111112, | |
| "grad_norm": 426974.3125, | |
| "learning_rate": 1.5020576131687244e-05, | |
| "loss": 0.3932, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 2.1933333333333334, | |
| "grad_norm": 2040763.875, | |
| "learning_rate": 1.4979423868312758e-05, | |
| "loss": 0.5105, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 2.1955555555555555, | |
| "grad_norm": 178407.359375, | |
| "learning_rate": 1.4938271604938272e-05, | |
| "loss": 0.3597, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 2.1977777777777776, | |
| "grad_norm": 3175741.75, | |
| "learning_rate": 1.4897119341563787e-05, | |
| "loss": 0.5331, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "grad_norm": 854287.3125, | |
| "learning_rate": 1.48559670781893e-05, | |
| "loss": 0.5446, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 2.2022222222222223, | |
| "grad_norm": 164806.8125, | |
| "learning_rate": 1.4814814814814815e-05, | |
| "loss": 0.6404, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 2.2044444444444444, | |
| "grad_norm": 770559.1875, | |
| "learning_rate": 1.4773662551440329e-05, | |
| "loss": 0.4534, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 2.2066666666666666, | |
| "grad_norm": 3397058.5, | |
| "learning_rate": 1.4732510288065845e-05, | |
| "loss": 0.6368, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 2.2088888888888887, | |
| "grad_norm": 628163.8125, | |
| "learning_rate": 1.4691358024691359e-05, | |
| "loss": 0.5398, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 2.2111111111111112, | |
| "grad_norm": 451703.46875, | |
| "learning_rate": 1.4650205761316873e-05, | |
| "loss": 0.5462, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 2.2133333333333334, | |
| "grad_norm": 375908.25, | |
| "learning_rate": 1.4609053497942387e-05, | |
| "loss": 0.4582, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 2.2155555555555555, | |
| "grad_norm": 450731.4375, | |
| "learning_rate": 1.4567901234567902e-05, | |
| "loss": 0.4855, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 2.2177777777777776, | |
| "grad_norm": 436833.4375, | |
| "learning_rate": 1.4526748971193416e-05, | |
| "loss": 0.544, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "grad_norm": 214384.796875, | |
| "learning_rate": 1.448559670781893e-05, | |
| "loss": 0.5323, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 2.2222222222222223, | |
| "grad_norm": 813175.4375, | |
| "learning_rate": 1.4444444444444444e-05, | |
| "loss": 0.5391, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 2.2244444444444444, | |
| "grad_norm": 252968.859375, | |
| "learning_rate": 1.440329218106996e-05, | |
| "loss": 0.4484, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 2.2266666666666666, | |
| "grad_norm": 698381.125, | |
| "learning_rate": 1.4362139917695474e-05, | |
| "loss": 0.4685, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 2.2288888888888887, | |
| "grad_norm": 1146888.125, | |
| "learning_rate": 1.4320987654320988e-05, | |
| "loss": 0.4942, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 2.2311111111111113, | |
| "grad_norm": 562644.9375, | |
| "learning_rate": 1.4279835390946502e-05, | |
| "loss": 0.5762, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 2.2333333333333334, | |
| "grad_norm": 259112.0, | |
| "learning_rate": 1.4238683127572017e-05, | |
| "loss": 0.4417, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 2.2355555555555555, | |
| "grad_norm": 440842.65625, | |
| "learning_rate": 1.419753086419753e-05, | |
| "loss": 0.5042, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 2.2377777777777776, | |
| "grad_norm": 1526809.625, | |
| "learning_rate": 1.4156378600823045e-05, | |
| "loss": 0.6759, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "grad_norm": 344080.34375, | |
| "learning_rate": 1.411522633744856e-05, | |
| "loss": 0.4194, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 2.2422222222222223, | |
| "grad_norm": 1327518.875, | |
| "learning_rate": 1.4074074074074075e-05, | |
| "loss": 0.5189, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 2.2444444444444445, | |
| "grad_norm": 496668.375, | |
| "learning_rate": 1.4032921810699589e-05, | |
| "loss": 0.4864, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 2.2466666666666666, | |
| "grad_norm": 341703.5, | |
| "learning_rate": 1.3991769547325103e-05, | |
| "loss": 0.4511, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 2.2488888888888887, | |
| "grad_norm": 1272398.0, | |
| "learning_rate": 1.3950617283950617e-05, | |
| "loss": 0.4773, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 2.2511111111111113, | |
| "grad_norm": 1034172.625, | |
| "learning_rate": 1.3909465020576132e-05, | |
| "loss": 0.5444, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 2.2533333333333334, | |
| "grad_norm": 538651.0625, | |
| "learning_rate": 1.3868312757201646e-05, | |
| "loss": 0.4896, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 2.2555555555555555, | |
| "grad_norm": 759162.3125, | |
| "learning_rate": 1.382716049382716e-05, | |
| "loss": 0.4478, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 2.2577777777777777, | |
| "grad_norm": 1303284.375, | |
| "learning_rate": 1.3786008230452676e-05, | |
| "loss": 0.6972, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "grad_norm": 1262487.5, | |
| "learning_rate": 1.374485596707819e-05, | |
| "loss": 0.5338, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 2.2622222222222224, | |
| "grad_norm": 916309.125, | |
| "learning_rate": 1.3703703703703704e-05, | |
| "loss": 0.6122, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 2.2644444444444445, | |
| "grad_norm": 459222.71875, | |
| "learning_rate": 1.3662551440329218e-05, | |
| "loss": 0.4757, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 2.2666666666666666, | |
| "grad_norm": 517379.3125, | |
| "learning_rate": 1.3621399176954732e-05, | |
| "loss": 0.6726, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 2.2688888888888887, | |
| "grad_norm": 391503.15625, | |
| "learning_rate": 1.3580246913580247e-05, | |
| "loss": 0.6403, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 2.2711111111111113, | |
| "grad_norm": 1065075.875, | |
| "learning_rate": 1.353909465020576e-05, | |
| "loss": 0.5819, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 2.2733333333333334, | |
| "grad_norm": 819979.5, | |
| "learning_rate": 1.3497942386831278e-05, | |
| "loss": 0.5818, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 2.2755555555555556, | |
| "grad_norm": 604300.375, | |
| "learning_rate": 1.3456790123456793e-05, | |
| "loss": 0.5714, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 2.2777777777777777, | |
| "grad_norm": 298492.9375, | |
| "learning_rate": 1.3415637860082305e-05, | |
| "loss": 0.4724, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 2.2800000000000002, | |
| "grad_norm": 535608.0, | |
| "learning_rate": 1.3374485596707819e-05, | |
| "loss": 0.4827, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 2.2822222222222224, | |
| "grad_norm": 2979703.75, | |
| "learning_rate": 1.3333333333333333e-05, | |
| "loss": 0.5448, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 2.2844444444444445, | |
| "grad_norm": 876960.875, | |
| "learning_rate": 1.3292181069958847e-05, | |
| "loss": 0.5854, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 2.2866666666666666, | |
| "grad_norm": 350376.8125, | |
| "learning_rate": 1.3251028806584362e-05, | |
| "loss": 0.5619, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 2.2888888888888888, | |
| "grad_norm": 1407370.875, | |
| "learning_rate": 1.3209876543209876e-05, | |
| "loss": 0.4644, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 2.2911111111111113, | |
| "grad_norm": 431142.75, | |
| "learning_rate": 1.3168724279835393e-05, | |
| "loss": 0.577, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 2.2933333333333334, | |
| "grad_norm": 1626133.875, | |
| "learning_rate": 1.3127572016460907e-05, | |
| "loss": 0.5798, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 2.2955555555555556, | |
| "grad_norm": 731867.6875, | |
| "learning_rate": 1.308641975308642e-05, | |
| "loss": 0.5515, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 2.2977777777777777, | |
| "grad_norm": 519474.03125, | |
| "learning_rate": 1.3045267489711934e-05, | |
| "loss": 0.5661, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "grad_norm": 1156532.125, | |
| "learning_rate": 1.3004115226337448e-05, | |
| "loss": 0.5095, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 2.3022222222222224, | |
| "grad_norm": 566316.4375, | |
| "learning_rate": 1.2962962962962962e-05, | |
| "loss": 0.5757, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 2.3044444444444445, | |
| "grad_norm": 1570133.75, | |
| "learning_rate": 1.2921810699588477e-05, | |
| "loss": 0.5725, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 2.3066666666666666, | |
| "grad_norm": 233635.09375, | |
| "learning_rate": 1.2880658436213994e-05, | |
| "loss": 0.4753, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 2.3088888888888888, | |
| "grad_norm": 667559.75, | |
| "learning_rate": 1.2839506172839508e-05, | |
| "loss": 0.5486, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 2.311111111111111, | |
| "grad_norm": 259317.078125, | |
| "learning_rate": 1.2798353909465022e-05, | |
| "loss": 0.5512, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 2.3133333333333335, | |
| "grad_norm": 334346.96875, | |
| "learning_rate": 1.2757201646090535e-05, | |
| "loss": 0.6806, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 2.3155555555555556, | |
| "grad_norm": 380433.90625, | |
| "learning_rate": 1.2716049382716049e-05, | |
| "loss": 0.4861, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 2.3177777777777777, | |
| "grad_norm": 591011.1875, | |
| "learning_rate": 1.2674897119341563e-05, | |
| "loss": 0.5437, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "grad_norm": 488778.875, | |
| "learning_rate": 1.2633744855967077e-05, | |
| "loss": 0.528, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 2.3222222222222224, | |
| "grad_norm": 1632864.625, | |
| "learning_rate": 1.2592592592592592e-05, | |
| "loss": 0.5726, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 2.3244444444444445, | |
| "grad_norm": 222425.59375, | |
| "learning_rate": 1.255144032921811e-05, | |
| "loss": 0.5392, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 2.3266666666666667, | |
| "grad_norm": 402851.4375, | |
| "learning_rate": 1.2510288065843623e-05, | |
| "loss": 0.4061, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 2.328888888888889, | |
| "grad_norm": 253659.3125, | |
| "learning_rate": 1.2469135802469137e-05, | |
| "loss": 0.5208, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 2.3311111111111114, | |
| "grad_norm": 753892.75, | |
| "learning_rate": 1.2427983539094652e-05, | |
| "loss": 0.5614, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 2.3333333333333335, | |
| "grad_norm": 760216.625, | |
| "learning_rate": 1.2386831275720164e-05, | |
| "loss": 0.4855, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 2.3355555555555556, | |
| "grad_norm": 1279448.5, | |
| "learning_rate": 1.2345679012345678e-05, | |
| "loss": 0.5871, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 2.3377777777777777, | |
| "grad_norm": 878854.0625, | |
| "learning_rate": 1.2304526748971194e-05, | |
| "loss": 0.469, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "grad_norm": 1088848.0, | |
| "learning_rate": 1.2263374485596708e-05, | |
| "loss": 0.4336, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 2.3422222222222224, | |
| "grad_norm": 986841.875, | |
| "learning_rate": 1.2222222222222222e-05, | |
| "loss": 0.4855, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 2.3444444444444446, | |
| "grad_norm": 534454.5, | |
| "learning_rate": 1.2181069958847738e-05, | |
| "loss": 0.5174, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 2.3466666666666667, | |
| "grad_norm": 1499670.625, | |
| "learning_rate": 1.2139917695473252e-05, | |
| "loss": 0.4638, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 2.348888888888889, | |
| "grad_norm": 1340620.375, | |
| "learning_rate": 1.2098765432098767e-05, | |
| "loss": 0.4267, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 2.351111111111111, | |
| "grad_norm": 1036810.5625, | |
| "learning_rate": 1.2057613168724279e-05, | |
| "loss": 0.4333, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 2.3533333333333335, | |
| "grad_norm": 415893.0, | |
| "learning_rate": 1.2016460905349795e-05, | |
| "loss": 0.5185, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 2.3555555555555556, | |
| "grad_norm": 490719.5, | |
| "learning_rate": 1.1975308641975309e-05, | |
| "loss": 0.4477, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 2.3577777777777778, | |
| "grad_norm": 342900.34375, | |
| "learning_rate": 1.1934156378600823e-05, | |
| "loss": 0.4744, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "grad_norm": 299380.8125, | |
| "learning_rate": 1.1893004115226337e-05, | |
| "loss": 0.5143, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 2.362222222222222, | |
| "grad_norm": 789056.3125, | |
| "learning_rate": 1.1851851851851853e-05, | |
| "loss": 0.5449, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 2.3644444444444446, | |
| "grad_norm": 476553.375, | |
| "learning_rate": 1.1810699588477367e-05, | |
| "loss": 0.5639, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 2.3666666666666667, | |
| "grad_norm": 607017.375, | |
| "learning_rate": 1.1769547325102882e-05, | |
| "loss": 0.4924, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 2.368888888888889, | |
| "grad_norm": 197236.609375, | |
| "learning_rate": 1.1728395061728396e-05, | |
| "loss": 0.5817, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 2.371111111111111, | |
| "grad_norm": 438141.1875, | |
| "learning_rate": 1.168724279835391e-05, | |
| "loss": 0.5657, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 2.3733333333333335, | |
| "grad_norm": 351896.28125, | |
| "learning_rate": 1.1646090534979424e-05, | |
| "loss": 0.5776, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 2.3755555555555556, | |
| "grad_norm": 486697.25, | |
| "learning_rate": 1.1604938271604938e-05, | |
| "loss": 0.4206, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 2.3777777777777778, | |
| "grad_norm": 895090.4375, | |
| "learning_rate": 1.1563786008230454e-05, | |
| "loss": 0.4537, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "grad_norm": 385076.90625, | |
| "learning_rate": 1.1522633744855968e-05, | |
| "loss": 0.5011, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 2.3822222222222225, | |
| "grad_norm": 422288.5625, | |
| "learning_rate": 1.1481481481481482e-05, | |
| "loss": 0.5286, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 2.3844444444444446, | |
| "grad_norm": 1136325.5, | |
| "learning_rate": 1.1440329218106997e-05, | |
| "loss": 0.5168, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 2.3866666666666667, | |
| "grad_norm": 385298.5625, | |
| "learning_rate": 1.139917695473251e-05, | |
| "loss": 0.5362, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 2.388888888888889, | |
| "grad_norm": 982441.0, | |
| "learning_rate": 1.1358024691358025e-05, | |
| "loss": 0.5212, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 2.391111111111111, | |
| "grad_norm": 119587.1484375, | |
| "learning_rate": 1.1316872427983539e-05, | |
| "loss": 0.5002, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 2.3933333333333335, | |
| "grad_norm": 656648.4375, | |
| "learning_rate": 1.1275720164609053e-05, | |
| "loss": 0.6891, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 2.3955555555555557, | |
| "grad_norm": 761881.3125, | |
| "learning_rate": 1.123456790123457e-05, | |
| "loss": 0.3717, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 2.397777777777778, | |
| "grad_norm": 1027932.5, | |
| "learning_rate": 1.1193415637860083e-05, | |
| "loss": 0.6497, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "grad_norm": 405516.03125, | |
| "learning_rate": 1.1152263374485597e-05, | |
| "loss": 0.4851, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 2.402222222222222, | |
| "grad_norm": 1639993.875, | |
| "learning_rate": 1.1111111111111112e-05, | |
| "loss": 0.5472, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 2.4044444444444446, | |
| "grad_norm": 212303.59375, | |
| "learning_rate": 1.1069958847736626e-05, | |
| "loss": 0.5818, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 2.4066666666666667, | |
| "grad_norm": 152677.265625, | |
| "learning_rate": 1.102880658436214e-05, | |
| "loss": 0.5628, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 2.408888888888889, | |
| "grad_norm": 957934.75, | |
| "learning_rate": 1.0987654320987654e-05, | |
| "loss": 0.4918, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 2.411111111111111, | |
| "grad_norm": 318580.0625, | |
| "learning_rate": 1.094650205761317e-05, | |
| "loss": 0.5454, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 2.413333333333333, | |
| "grad_norm": 335783.8125, | |
| "learning_rate": 1.0905349794238684e-05, | |
| "loss": 0.3913, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 2.4155555555555557, | |
| "grad_norm": 606167.6875, | |
| "learning_rate": 1.0864197530864198e-05, | |
| "loss": 0.4603, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 2.417777777777778, | |
| "grad_norm": 1082866.5, | |
| "learning_rate": 1.0823045267489712e-05, | |
| "loss": 0.556, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "grad_norm": 439366.1875, | |
| "learning_rate": 1.0781893004115227e-05, | |
| "loss": 0.5366, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 2.422222222222222, | |
| "grad_norm": 859306.625, | |
| "learning_rate": 1.074074074074074e-05, | |
| "loss": 0.557, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 2.4244444444444446, | |
| "grad_norm": 356610.53125, | |
| "learning_rate": 1.0699588477366255e-05, | |
| "loss": 0.6092, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 2.4266666666666667, | |
| "grad_norm": 862626.875, | |
| "learning_rate": 1.0658436213991769e-05, | |
| "loss": 0.5368, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 2.428888888888889, | |
| "grad_norm": 477033.71875, | |
| "learning_rate": 1.0617283950617285e-05, | |
| "loss": 0.6038, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 2.431111111111111, | |
| "grad_norm": 184494.3125, | |
| "learning_rate": 1.0576131687242799e-05, | |
| "loss": 0.4473, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 2.4333333333333336, | |
| "grad_norm": 704048.75, | |
| "learning_rate": 1.0534979423868313e-05, | |
| "loss": 0.5575, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 2.4355555555555557, | |
| "grad_norm": 1460752.5, | |
| "learning_rate": 1.0493827160493827e-05, | |
| "loss": 0.6811, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 2.437777777777778, | |
| "grad_norm": 1865951.0, | |
| "learning_rate": 1.0452674897119343e-05, | |
| "loss": 0.7688, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "grad_norm": 316543.03125, | |
| "learning_rate": 1.0411522633744856e-05, | |
| "loss": 0.4987, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 2.442222222222222, | |
| "grad_norm": 392440.625, | |
| "learning_rate": 1.037037037037037e-05, | |
| "loss": 0.6592, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 2.4444444444444446, | |
| "grad_norm": 286228.875, | |
| "learning_rate": 1.0329218106995884e-05, | |
| "loss": 0.6525, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 2.4466666666666668, | |
| "grad_norm": 436523.78125, | |
| "learning_rate": 1.02880658436214e-05, | |
| "loss": 0.4301, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 2.448888888888889, | |
| "grad_norm": 501997.0625, | |
| "learning_rate": 1.0246913580246914e-05, | |
| "loss": 0.4842, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 2.451111111111111, | |
| "grad_norm": 902289.5625, | |
| "learning_rate": 1.0205761316872428e-05, | |
| "loss": 0.5599, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 2.453333333333333, | |
| "grad_norm": 2513825.25, | |
| "learning_rate": 1.0164609053497944e-05, | |
| "loss": 0.6615, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 2.4555555555555557, | |
| "grad_norm": 345013.1875, | |
| "learning_rate": 1.0123456790123458e-05, | |
| "loss": 0.5646, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 2.457777777777778, | |
| "grad_norm": 651783.6875, | |
| "learning_rate": 1.008230452674897e-05, | |
| "loss": 0.6124, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "grad_norm": 2414475.75, | |
| "learning_rate": 1.0041152263374485e-05, | |
| "loss": 0.4809, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 2.462222222222222, | |
| "grad_norm": 312356.15625, | |
| "learning_rate": 1e-05, | |
| "loss": 0.5917, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 2.464444444444444, | |
| "grad_norm": 1360208.625, | |
| "learning_rate": 9.958847736625515e-06, | |
| "loss": 0.7328, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 2.466666666666667, | |
| "grad_norm": 564814.125, | |
| "learning_rate": 9.917695473251029e-06, | |
| "loss": 0.5012, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 2.468888888888889, | |
| "grad_norm": 783253.1875, | |
| "learning_rate": 9.876543209876543e-06, | |
| "loss": 0.5959, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 2.471111111111111, | |
| "grad_norm": 1069690.125, | |
| "learning_rate": 9.83539094650206e-06, | |
| "loss": 0.4775, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 2.473333333333333, | |
| "grad_norm": 698115.0, | |
| "learning_rate": 9.794238683127573e-06, | |
| "loss": 0.6113, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 2.4755555555555557, | |
| "grad_norm": 595121.625, | |
| "learning_rate": 9.753086419753086e-06, | |
| "loss": 0.4327, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 2.477777777777778, | |
| "grad_norm": 686340.625, | |
| "learning_rate": 9.7119341563786e-06, | |
| "loss": 0.5782, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "grad_norm": 562637.4375, | |
| "learning_rate": 9.670781893004116e-06, | |
| "loss": 0.572, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 2.482222222222222, | |
| "grad_norm": 700603.75, | |
| "learning_rate": 9.62962962962963e-06, | |
| "loss": 0.4896, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 2.4844444444444447, | |
| "grad_norm": 8574117.0, | |
| "learning_rate": 9.588477366255144e-06, | |
| "loss": 0.5126, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 2.486666666666667, | |
| "grad_norm": 1065802.0, | |
| "learning_rate": 9.54732510288066e-06, | |
| "loss": 0.5362, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 2.488888888888889, | |
| "grad_norm": 333646.25, | |
| "learning_rate": 9.506172839506174e-06, | |
| "loss": 0.5609, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 2.491111111111111, | |
| "grad_norm": 804433.9375, | |
| "learning_rate": 9.465020576131688e-06, | |
| "loss": 0.5512, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 2.493333333333333, | |
| "grad_norm": 391463.375, | |
| "learning_rate": 9.423868312757202e-06, | |
| "loss": 0.4817, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 2.4955555555555557, | |
| "grad_norm": 1378079.375, | |
| "learning_rate": 9.382716049382717e-06, | |
| "loss": 0.3697, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 2.497777777777778, | |
| "grad_norm": 505990.03125, | |
| "learning_rate": 9.34156378600823e-06, | |
| "loss": 0.5476, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 3523881.5, | |
| "learning_rate": 9.300411522633745e-06, | |
| "loss": 0.5519, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 2.502222222222222, | |
| "grad_norm": 341878.0625, | |
| "learning_rate": 9.259259259259259e-06, | |
| "loss": 0.6628, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 2.5044444444444443, | |
| "grad_norm": 391592.46875, | |
| "learning_rate": 9.218106995884775e-06, | |
| "loss": 0.5047, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 2.506666666666667, | |
| "grad_norm": 1288312.375, | |
| "learning_rate": 9.17695473251029e-06, | |
| "loss": 0.5387, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 2.508888888888889, | |
| "grad_norm": 965933.8125, | |
| "learning_rate": 9.135802469135803e-06, | |
| "loss": 0.6002, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 2.511111111111111, | |
| "grad_norm": 575313.9375, | |
| "learning_rate": 9.094650205761317e-06, | |
| "loss": 0.5218, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 2.513333333333333, | |
| "grad_norm": 508500.59375, | |
| "learning_rate": 9.053497942386832e-06, | |
| "loss": 0.5815, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 2.5155555555555553, | |
| "grad_norm": 388008.0, | |
| "learning_rate": 9.012345679012346e-06, | |
| "loss": 0.4585, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 2.517777777777778, | |
| "grad_norm": 927888.25, | |
| "learning_rate": 8.97119341563786e-06, | |
| "loss": 0.5115, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "grad_norm": 1426170.375, | |
| "learning_rate": 8.930041152263374e-06, | |
| "loss": 0.4249, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 2.522222222222222, | |
| "grad_norm": 710862.0625, | |
| "learning_rate": 8.88888888888889e-06, | |
| "loss": 0.4375, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 2.5244444444444447, | |
| "grad_norm": 900323.4375, | |
| "learning_rate": 8.847736625514404e-06, | |
| "loss": 0.5462, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 2.5266666666666664, | |
| "grad_norm": 444571.1875, | |
| "learning_rate": 8.806584362139918e-06, | |
| "loss": 0.4567, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 2.528888888888889, | |
| "grad_norm": 567729.4375, | |
| "learning_rate": 8.765432098765432e-06, | |
| "loss": 0.5864, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 2.531111111111111, | |
| "grad_norm": 215833.796875, | |
| "learning_rate": 8.724279835390947e-06, | |
| "loss": 0.4469, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 2.533333333333333, | |
| "grad_norm": 569421.875, | |
| "learning_rate": 8.68312757201646e-06, | |
| "loss": 0.5494, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 2.535555555555556, | |
| "grad_norm": 558661.5, | |
| "learning_rate": 8.641975308641975e-06, | |
| "loss": 0.4522, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 2.537777777777778, | |
| "grad_norm": 2251505.0, | |
| "learning_rate": 8.60082304526749e-06, | |
| "loss": 0.4683, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "grad_norm": 796107.0625, | |
| "learning_rate": 8.559670781893005e-06, | |
| "loss": 0.6036, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 2.542222222222222, | |
| "grad_norm": 360527.28125, | |
| "learning_rate": 8.518518518518519e-06, | |
| "loss": 0.4764, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 2.5444444444444443, | |
| "grad_norm": 412231.875, | |
| "learning_rate": 8.477366255144033e-06, | |
| "loss": 0.4871, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 2.546666666666667, | |
| "grad_norm": 331903.4375, | |
| "learning_rate": 8.436213991769547e-06, | |
| "loss": 0.5037, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 2.548888888888889, | |
| "grad_norm": 1922785.125, | |
| "learning_rate": 8.395061728395062e-06, | |
| "loss": 0.5288, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 2.551111111111111, | |
| "grad_norm": 391779.03125, | |
| "learning_rate": 8.353909465020576e-06, | |
| "loss": 0.5619, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 2.5533333333333332, | |
| "grad_norm": 590551.1875, | |
| "learning_rate": 8.31275720164609e-06, | |
| "loss": 0.539, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 2.5555555555555554, | |
| "grad_norm": 1713623.75, | |
| "learning_rate": 8.271604938271606e-06, | |
| "loss": 0.5789, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 2.557777777777778, | |
| "grad_norm": 550370.375, | |
| "learning_rate": 8.23045267489712e-06, | |
| "loss": 0.5584, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "grad_norm": 662291.75, | |
| "learning_rate": 8.189300411522634e-06, | |
| "loss": 0.5375, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 2.562222222222222, | |
| "grad_norm": 457474.65625, | |
| "learning_rate": 8.14814814814815e-06, | |
| "loss": 0.5263, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 2.5644444444444443, | |
| "grad_norm": 955969.6875, | |
| "learning_rate": 8.106995884773662e-06, | |
| "loss": 0.589, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 2.5666666666666664, | |
| "grad_norm": 712221.1875, | |
| "learning_rate": 8.065843621399177e-06, | |
| "loss": 0.4752, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 2.568888888888889, | |
| "grad_norm": 2151926.25, | |
| "learning_rate": 8.02469135802469e-06, | |
| "loss": 0.4964, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 2.571111111111111, | |
| "grad_norm": 749660.25, | |
| "learning_rate": 7.983539094650207e-06, | |
| "loss": 0.7246, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 2.5733333333333333, | |
| "grad_norm": 1508001.0, | |
| "learning_rate": 7.94238683127572e-06, | |
| "loss": 0.4743, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 2.575555555555556, | |
| "grad_norm": 749034.625, | |
| "learning_rate": 7.901234567901235e-06, | |
| "loss": 0.5987, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 2.5777777777777775, | |
| "grad_norm": 288706.375, | |
| "learning_rate": 7.860082304526749e-06, | |
| "loss": 0.4942, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "grad_norm": 256755.78125, | |
| "learning_rate": 7.818930041152265e-06, | |
| "loss": 0.569, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 2.582222222222222, | |
| "grad_norm": 871312.375, | |
| "learning_rate": 7.777777777777777e-06, | |
| "loss": 0.4861, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 2.5844444444444443, | |
| "grad_norm": 900119.3125, | |
| "learning_rate": 7.736625514403292e-06, | |
| "loss": 0.6361, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 2.586666666666667, | |
| "grad_norm": 1303723.375, | |
| "learning_rate": 7.695473251028806e-06, | |
| "loss": 0.6025, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 2.588888888888889, | |
| "grad_norm": 1132907.75, | |
| "learning_rate": 7.654320987654322e-06, | |
| "loss": 0.6582, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 2.591111111111111, | |
| "grad_norm": 928938.375, | |
| "learning_rate": 7.613168724279836e-06, | |
| "loss": 0.6281, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 2.5933333333333333, | |
| "grad_norm": 289348.6875, | |
| "learning_rate": 7.57201646090535e-06, | |
| "loss": 0.7755, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 2.5955555555555554, | |
| "grad_norm": 447535.09375, | |
| "learning_rate": 7.530864197530865e-06, | |
| "loss": 0.5153, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 2.597777777777778, | |
| "grad_norm": 886854.625, | |
| "learning_rate": 7.489711934156379e-06, | |
| "loss": 0.5239, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "grad_norm": 1512038.0, | |
| "learning_rate": 7.448559670781893e-06, | |
| "loss": 0.7509, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 2.602222222222222, | |
| "grad_norm": 322297.5625, | |
| "learning_rate": 7.4074074074074075e-06, | |
| "loss": 0.5442, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 2.6044444444444443, | |
| "grad_norm": 359192.5, | |
| "learning_rate": 7.3662551440329225e-06, | |
| "loss": 0.5081, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 2.6066666666666665, | |
| "grad_norm": 1025895.5, | |
| "learning_rate": 7.325102880658437e-06, | |
| "loss": 0.5026, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 2.608888888888889, | |
| "grad_norm": 358256.5625, | |
| "learning_rate": 7.283950617283951e-06, | |
| "loss": 0.5034, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 2.611111111111111, | |
| "grad_norm": 276032.125, | |
| "learning_rate": 7.242798353909465e-06, | |
| "loss": 0.5133, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 2.6133333333333333, | |
| "grad_norm": 566315.75, | |
| "learning_rate": 7.20164609053498e-06, | |
| "loss": 0.5736, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 2.6155555555555554, | |
| "grad_norm": 442241.8125, | |
| "learning_rate": 7.160493827160494e-06, | |
| "loss": 0.5482, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 2.6177777777777775, | |
| "grad_norm": 414541.0625, | |
| "learning_rate": 7.119341563786008e-06, | |
| "loss": 0.4391, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "grad_norm": 462216.65625, | |
| "learning_rate": 7.0781893004115225e-06, | |
| "loss": 0.512, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 2.6222222222222222, | |
| "grad_norm": 352849.78125, | |
| "learning_rate": 7.0370370370370375e-06, | |
| "loss": 0.5742, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 2.6244444444444444, | |
| "grad_norm": 1022206.0, | |
| "learning_rate": 6.995884773662552e-06, | |
| "loss": 0.5395, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 2.626666666666667, | |
| "grad_norm": 1133120.0, | |
| "learning_rate": 6.954732510288066e-06, | |
| "loss": 0.591, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 2.628888888888889, | |
| "grad_norm": 1288370.625, | |
| "learning_rate": 6.91358024691358e-06, | |
| "loss": 0.5779, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 2.631111111111111, | |
| "grad_norm": 271517.6875, | |
| "learning_rate": 6.872427983539095e-06, | |
| "loss": 0.5678, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 2.6333333333333333, | |
| "grad_norm": 429049.8125, | |
| "learning_rate": 6.831275720164609e-06, | |
| "loss": 0.4092, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 2.6355555555555554, | |
| "grad_norm": 456271.40625, | |
| "learning_rate": 6.790123456790123e-06, | |
| "loss": 0.4815, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 2.637777777777778, | |
| "grad_norm": 1050220.75, | |
| "learning_rate": 6.748971193415639e-06, | |
| "loss": 0.5069, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "grad_norm": 350885.25, | |
| "learning_rate": 6.7078189300411525e-06, | |
| "loss": 0.5406, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 2.6422222222222222, | |
| "grad_norm": 195271.328125, | |
| "learning_rate": 6.666666666666667e-06, | |
| "loss": 0.39, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 2.6444444444444444, | |
| "grad_norm": 583369.375, | |
| "learning_rate": 6.625514403292181e-06, | |
| "loss": 0.601, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 2.6466666666666665, | |
| "grad_norm": 644399.125, | |
| "learning_rate": 6.584362139917697e-06, | |
| "loss": 0.5584, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 2.648888888888889, | |
| "grad_norm": 396948.625, | |
| "learning_rate": 6.54320987654321e-06, | |
| "loss": 0.4902, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 2.651111111111111, | |
| "grad_norm": 538472.9375, | |
| "learning_rate": 6.502057613168724e-06, | |
| "loss": 0.5069, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 2.6533333333333333, | |
| "grad_norm": 734710.625, | |
| "learning_rate": 6.460905349794238e-06, | |
| "loss": 0.5317, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 2.6555555555555554, | |
| "grad_norm": 932180.3125, | |
| "learning_rate": 6.419753086419754e-06, | |
| "loss": 0.5767, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 2.6577777777777776, | |
| "grad_norm": 534092.5, | |
| "learning_rate": 6.3786008230452675e-06, | |
| "loss": 0.5063, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "grad_norm": 756892.3125, | |
| "learning_rate": 6.337448559670782e-06, | |
| "loss": 0.491, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 2.6622222222222223, | |
| "grad_norm": 469926.84375, | |
| "learning_rate": 6.296296296296296e-06, | |
| "loss": 0.4764, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 2.6644444444444444, | |
| "grad_norm": 853601.875, | |
| "learning_rate": 6.255144032921812e-06, | |
| "loss": 0.6428, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 2.6666666666666665, | |
| "grad_norm": 134657.203125, | |
| "learning_rate": 6.213991769547326e-06, | |
| "loss": 0.3901, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 2.6688888888888886, | |
| "grad_norm": 912945.125, | |
| "learning_rate": 6.172839506172839e-06, | |
| "loss": 0.8071, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 2.671111111111111, | |
| "grad_norm": 359723.96875, | |
| "learning_rate": 6.131687242798354e-06, | |
| "loss": 0.4296, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 2.6733333333333333, | |
| "grad_norm": 336840.28125, | |
| "learning_rate": 6.090534979423869e-06, | |
| "loss": 0.5657, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 2.6755555555555555, | |
| "grad_norm": 381600.5625, | |
| "learning_rate": 6.049382716049383e-06, | |
| "loss": 0.6522, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 2.677777777777778, | |
| "grad_norm": 159033.796875, | |
| "learning_rate": 6.0082304526748975e-06, | |
| "loss": 0.5458, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "grad_norm": 1330061.5, | |
| "learning_rate": 5.967078189300412e-06, | |
| "loss": 0.5794, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 2.6822222222222223, | |
| "grad_norm": 631698.125, | |
| "learning_rate": 5.925925925925927e-06, | |
| "loss": 0.5271, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 2.6844444444444444, | |
| "grad_norm": 357874.53125, | |
| "learning_rate": 5.884773662551441e-06, | |
| "loss": 0.3977, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 2.6866666666666665, | |
| "grad_norm": 3945204.0, | |
| "learning_rate": 5.843621399176955e-06, | |
| "loss": 0.4593, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 2.688888888888889, | |
| "grad_norm": 1239144.25, | |
| "learning_rate": 5.802469135802469e-06, | |
| "loss": 0.4449, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 2.6911111111111112, | |
| "grad_norm": 1069190.125, | |
| "learning_rate": 5.761316872427984e-06, | |
| "loss": 0.526, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 2.6933333333333334, | |
| "grad_norm": 1473693.75, | |
| "learning_rate": 5.720164609053498e-06, | |
| "loss": 0.6264, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 2.6955555555555555, | |
| "grad_norm": 660884.1875, | |
| "learning_rate": 5.6790123456790125e-06, | |
| "loss": 0.5883, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 2.6977777777777776, | |
| "grad_norm": 450712.21875, | |
| "learning_rate": 5.637860082304527e-06, | |
| "loss": 0.5217, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "grad_norm": 1028915.375, | |
| "learning_rate": 5.596707818930042e-06, | |
| "loss": 0.5244, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 2.7022222222222223, | |
| "grad_norm": 1851113.75, | |
| "learning_rate": 5.555555555555556e-06, | |
| "loss": 0.5434, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 2.7044444444444444, | |
| "grad_norm": 1106682.5, | |
| "learning_rate": 5.51440329218107e-06, | |
| "loss": 0.5526, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 2.7066666666666666, | |
| "grad_norm": 295857.375, | |
| "learning_rate": 5.473251028806585e-06, | |
| "loss": 0.595, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 2.7088888888888887, | |
| "grad_norm": 563381.375, | |
| "learning_rate": 5.432098765432099e-06, | |
| "loss": 0.5262, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 2.7111111111111112, | |
| "grad_norm": 454781.5625, | |
| "learning_rate": 5.390946502057613e-06, | |
| "loss": 0.4509, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 2.7133333333333334, | |
| "grad_norm": 932880.8125, | |
| "learning_rate": 5.3497942386831275e-06, | |
| "loss": 0.6851, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 2.7155555555555555, | |
| "grad_norm": 1671730.75, | |
| "learning_rate": 5.3086419753086425e-06, | |
| "loss": 0.6121, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 2.7177777777777776, | |
| "grad_norm": 4467779.0, | |
| "learning_rate": 5.267489711934157e-06, | |
| "loss": 0.6277, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 2.7199999999999998, | |
| "grad_norm": 900557.4375, | |
| "learning_rate": 5.226337448559672e-06, | |
| "loss": 0.4294, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 2.7222222222222223, | |
| "grad_norm": 160277.46875, | |
| "learning_rate": 5.185185185185185e-06, | |
| "loss": 0.6811, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 2.7244444444444444, | |
| "grad_norm": 149784.03125, | |
| "learning_rate": 5.1440329218107e-06, | |
| "loss": 0.5307, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 2.7266666666666666, | |
| "grad_norm": 244266.984375, | |
| "learning_rate": 5.102880658436214e-06, | |
| "loss": 0.5173, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 2.728888888888889, | |
| "grad_norm": 401191.625, | |
| "learning_rate": 5.061728395061729e-06, | |
| "loss": 0.4737, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 2.7311111111111113, | |
| "grad_norm": 1498860.25, | |
| "learning_rate": 5.0205761316872425e-06, | |
| "loss": 0.4934, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 2.7333333333333334, | |
| "grad_norm": 145583.71875, | |
| "learning_rate": 4.9794238683127575e-06, | |
| "loss": 0.5605, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 2.7355555555555555, | |
| "grad_norm": 740040.5, | |
| "learning_rate": 4.938271604938272e-06, | |
| "loss": 0.5821, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 2.7377777777777776, | |
| "grad_norm": 2460071.25, | |
| "learning_rate": 4.897119341563787e-06, | |
| "loss": 0.7144, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "grad_norm": 803059.0, | |
| "learning_rate": 4.8559670781893e-06, | |
| "loss": 0.6062, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 2.7422222222222223, | |
| "grad_norm": 657412.25, | |
| "learning_rate": 4.814814814814815e-06, | |
| "loss": 0.4763, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 2.7444444444444445, | |
| "grad_norm": 307322.96875, | |
| "learning_rate": 4.77366255144033e-06, | |
| "loss": 0.4573, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 2.7466666666666666, | |
| "grad_norm": 1087119.5, | |
| "learning_rate": 4.732510288065844e-06, | |
| "loss": 0.5798, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 2.7488888888888887, | |
| "grad_norm": 476222.65625, | |
| "learning_rate": 4.691358024691358e-06, | |
| "loss": 0.4826, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 2.7511111111111113, | |
| "grad_norm": 769200.875, | |
| "learning_rate": 4.6502057613168725e-06, | |
| "loss": 0.477, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 2.7533333333333334, | |
| "grad_norm": 604819.4375, | |
| "learning_rate": 4.6090534979423875e-06, | |
| "loss": 0.4938, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 2.7555555555555555, | |
| "grad_norm": 1535238.25, | |
| "learning_rate": 4.567901234567902e-06, | |
| "loss": 0.3676, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 2.7577777777777777, | |
| "grad_norm": 841712.125, | |
| "learning_rate": 4.526748971193416e-06, | |
| "loss": 0.5449, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "grad_norm": 586374.5, | |
| "learning_rate": 4.48559670781893e-06, | |
| "loss": 0.4049, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 2.7622222222222224, | |
| "grad_norm": 1455824.125, | |
| "learning_rate": 4.444444444444445e-06, | |
| "loss": 0.528, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 2.7644444444444445, | |
| "grad_norm": 1251346.75, | |
| "learning_rate": 4.403292181069959e-06, | |
| "loss": 0.4563, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 2.7666666666666666, | |
| "grad_norm": 253219.390625, | |
| "learning_rate": 4.362139917695473e-06, | |
| "loss": 0.4896, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 2.7688888888888887, | |
| "grad_norm": 348807.59375, | |
| "learning_rate": 4.3209876543209875e-06, | |
| "loss": 0.4689, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 2.771111111111111, | |
| "grad_norm": 355768.21875, | |
| "learning_rate": 4.2798353909465025e-06, | |
| "loss": 0.5001, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 2.7733333333333334, | |
| "grad_norm": 222135.0, | |
| "learning_rate": 4.238683127572017e-06, | |
| "loss": 0.533, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 2.7755555555555556, | |
| "grad_norm": 638996.125, | |
| "learning_rate": 4.197530864197531e-06, | |
| "loss": 0.4011, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 2.7777777777777777, | |
| "grad_norm": 493758.21875, | |
| "learning_rate": 4.156378600823045e-06, | |
| "loss": 0.549, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 2.7800000000000002, | |
| "grad_norm": 468891.9375, | |
| "learning_rate": 4.11522633744856e-06, | |
| "loss": 0.5394, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 2.7822222222222224, | |
| "grad_norm": 793925.3125, | |
| "learning_rate": 4.074074074074075e-06, | |
| "loss": 0.7011, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 2.7844444444444445, | |
| "grad_norm": 572089.5, | |
| "learning_rate": 4.032921810699588e-06, | |
| "loss": 0.4487, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 2.7866666666666666, | |
| "grad_norm": 559628.75, | |
| "learning_rate": 3.991769547325103e-06, | |
| "loss": 0.5446, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 2.7888888888888888, | |
| "grad_norm": 947452.4375, | |
| "learning_rate": 3.9506172839506175e-06, | |
| "loss": 0.6898, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 2.7911111111111113, | |
| "grad_norm": 402679.625, | |
| "learning_rate": 3.9094650205761325e-06, | |
| "loss": 0.5524, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 2.7933333333333334, | |
| "grad_norm": 299420.40625, | |
| "learning_rate": 3.868312757201646e-06, | |
| "loss": 0.5117, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 2.7955555555555556, | |
| "grad_norm": 611257.6875, | |
| "learning_rate": 3.827160493827161e-06, | |
| "loss": 0.6882, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 2.7977777777777777, | |
| "grad_norm": 574839.9375, | |
| "learning_rate": 3.786008230452675e-06, | |
| "loss": 0.5716, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "grad_norm": 492008.53125, | |
| "learning_rate": 3.7448559670781896e-06, | |
| "loss": 0.6211, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 2.8022222222222224, | |
| "grad_norm": 794452.0, | |
| "learning_rate": 3.7037037037037037e-06, | |
| "loss": 0.5312, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 2.8044444444444445, | |
| "grad_norm": 208639.828125, | |
| "learning_rate": 3.6625514403292183e-06, | |
| "loss": 0.5969, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 2.8066666666666666, | |
| "grad_norm": 2034471.75, | |
| "learning_rate": 3.6213991769547325e-06, | |
| "loss": 0.7033, | |
| "step": 1263 | |
| }, | |
| { | |
| "epoch": 2.8088888888888888, | |
| "grad_norm": 357638.625, | |
| "learning_rate": 3.580246913580247e-06, | |
| "loss": 0.5115, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 2.811111111111111, | |
| "grad_norm": 398690.84375, | |
| "learning_rate": 3.5390946502057612e-06, | |
| "loss": 0.3857, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 2.8133333333333335, | |
| "grad_norm": 376240.9375, | |
| "learning_rate": 3.497942386831276e-06, | |
| "loss": 0.3986, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 2.8155555555555556, | |
| "grad_norm": 974289.6875, | |
| "learning_rate": 3.45679012345679e-06, | |
| "loss": 0.4911, | |
| "step": 1267 | |
| }, | |
| { | |
| "epoch": 2.8177777777777777, | |
| "grad_norm": 711186.6875, | |
| "learning_rate": 3.4156378600823046e-06, | |
| "loss": 0.4323, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "grad_norm": 765747.1875, | |
| "learning_rate": 3.3744855967078196e-06, | |
| "loss": 0.4529, | |
| "step": 1269 | |
| }, | |
| { | |
| "epoch": 2.822222222222222, | |
| "grad_norm": 587382.0625, | |
| "learning_rate": 3.3333333333333333e-06, | |
| "loss": 0.5034, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 2.8244444444444445, | |
| "grad_norm": 284979.75, | |
| "learning_rate": 3.2921810699588483e-06, | |
| "loss": 0.4863, | |
| "step": 1271 | |
| }, | |
| { | |
| "epoch": 2.8266666666666667, | |
| "grad_norm": 718397.25, | |
| "learning_rate": 3.251028806584362e-06, | |
| "loss": 0.5261, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 2.828888888888889, | |
| "grad_norm": 367435.875, | |
| "learning_rate": 3.209876543209877e-06, | |
| "loss": 0.5775, | |
| "step": 1273 | |
| }, | |
| { | |
| "epoch": 2.8311111111111114, | |
| "grad_norm": 804869.0, | |
| "learning_rate": 3.168724279835391e-06, | |
| "loss": 0.4268, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 2.8333333333333335, | |
| "grad_norm": 893933.8125, | |
| "learning_rate": 3.127572016460906e-06, | |
| "loss": 0.4797, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 2.8355555555555556, | |
| "grad_norm": 601643.5625, | |
| "learning_rate": 3.0864197530864196e-06, | |
| "loss": 0.64, | |
| "step": 1276 | |
| }, | |
| { | |
| "epoch": 2.8377777777777777, | |
| "grad_norm": 812132.75, | |
| "learning_rate": 3.0452674897119346e-06, | |
| "loss": 0.4334, | |
| "step": 1277 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "grad_norm": 6934080.0, | |
| "learning_rate": 3.0041152263374487e-06, | |
| "loss": 0.5341, | |
| "step": 1278 | |
| }, | |
| { | |
| "epoch": 2.8422222222222224, | |
| "grad_norm": 1309447.25, | |
| "learning_rate": 2.9629629629629633e-06, | |
| "loss": 0.491, | |
| "step": 1279 | |
| }, | |
| { | |
| "epoch": 2.8444444444444446, | |
| "grad_norm": 298212.8125, | |
| "learning_rate": 2.9218106995884775e-06, | |
| "loss": 0.561, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 2.8466666666666667, | |
| "grad_norm": 2192948.5, | |
| "learning_rate": 2.880658436213992e-06, | |
| "loss": 0.603, | |
| "step": 1281 | |
| }, | |
| { | |
| "epoch": 2.848888888888889, | |
| "grad_norm": 673454.4375, | |
| "learning_rate": 2.8395061728395062e-06, | |
| "loss": 0.4945, | |
| "step": 1282 | |
| }, | |
| { | |
| "epoch": 2.851111111111111, | |
| "grad_norm": 419024.84375, | |
| "learning_rate": 2.798353909465021e-06, | |
| "loss": 0.5274, | |
| "step": 1283 | |
| }, | |
| { | |
| "epoch": 2.8533333333333335, | |
| "grad_norm": 1039461.1875, | |
| "learning_rate": 2.757201646090535e-06, | |
| "loss": 0.5511, | |
| "step": 1284 | |
| }, | |
| { | |
| "epoch": 2.8555555555555556, | |
| "grad_norm": 670416.1875, | |
| "learning_rate": 2.7160493827160496e-06, | |
| "loss": 0.5403, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 2.8577777777777778, | |
| "grad_norm": 835062.6875, | |
| "learning_rate": 2.6748971193415637e-06, | |
| "loss": 0.4858, | |
| "step": 1286 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "grad_norm": 959062.8125, | |
| "learning_rate": 2.6337448559670783e-06, | |
| "loss": 0.4005, | |
| "step": 1287 | |
| }, | |
| { | |
| "epoch": 2.862222222222222, | |
| "grad_norm": 1145211.875, | |
| "learning_rate": 2.5925925925925925e-06, | |
| "loss": 0.4198, | |
| "step": 1288 | |
| }, | |
| { | |
| "epoch": 2.8644444444444446, | |
| "grad_norm": 1677770.375, | |
| "learning_rate": 2.551440329218107e-06, | |
| "loss": 0.4583, | |
| "step": 1289 | |
| }, | |
| { | |
| "epoch": 2.8666666666666667, | |
| "grad_norm": 1481635.875, | |
| "learning_rate": 2.5102880658436212e-06, | |
| "loss": 0.4812, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 2.868888888888889, | |
| "grad_norm": 83220.25, | |
| "learning_rate": 2.469135802469136e-06, | |
| "loss": 0.4624, | |
| "step": 1291 | |
| }, | |
| { | |
| "epoch": 2.871111111111111, | |
| "grad_norm": 414993.90625, | |
| "learning_rate": 2.42798353909465e-06, | |
| "loss": 0.3868, | |
| "step": 1292 | |
| }, | |
| { | |
| "epoch": 2.873333333333333, | |
| "grad_norm": 627476.8125, | |
| "learning_rate": 2.386831275720165e-06, | |
| "loss": 0.4232, | |
| "step": 1293 | |
| }, | |
| { | |
| "epoch": 2.8755555555555556, | |
| "grad_norm": 650311.125, | |
| "learning_rate": 2.345679012345679e-06, | |
| "loss": 0.5251, | |
| "step": 1294 | |
| }, | |
| { | |
| "epoch": 2.8777777777777778, | |
| "grad_norm": 392837.4375, | |
| "learning_rate": 2.3045267489711937e-06, | |
| "loss": 0.4596, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "grad_norm": 1472083.375, | |
| "learning_rate": 2.263374485596708e-06, | |
| "loss": 0.5652, | |
| "step": 1296 | |
| }, | |
| { | |
| "epoch": 2.8822222222222225, | |
| "grad_norm": 197789.484375, | |
| "learning_rate": 2.2222222222222225e-06, | |
| "loss": 0.5791, | |
| "step": 1297 | |
| }, | |
| { | |
| "epoch": 2.8844444444444446, | |
| "grad_norm": 416251.53125, | |
| "learning_rate": 2.1810699588477367e-06, | |
| "loss": 0.4691, | |
| "step": 1298 | |
| }, | |
| { | |
| "epoch": 2.8866666666666667, | |
| "grad_norm": 332912.5, | |
| "learning_rate": 2.1399176954732512e-06, | |
| "loss": 0.4653, | |
| "step": 1299 | |
| }, | |
| { | |
| "epoch": 2.888888888888889, | |
| "grad_norm": 1015003.5, | |
| "learning_rate": 2.0987654320987654e-06, | |
| "loss": 0.5523, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 2.891111111111111, | |
| "grad_norm": 560585.5625, | |
| "learning_rate": 2.05761316872428e-06, | |
| "loss": 0.6112, | |
| "step": 1301 | |
| }, | |
| { | |
| "epoch": 2.8933333333333335, | |
| "grad_norm": 1809974.5, | |
| "learning_rate": 2.016460905349794e-06, | |
| "loss": 0.5622, | |
| "step": 1302 | |
| }, | |
| { | |
| "epoch": 2.8955555555555557, | |
| "grad_norm": 449177.0625, | |
| "learning_rate": 1.9753086419753087e-06, | |
| "loss": 0.5725, | |
| "step": 1303 | |
| }, | |
| { | |
| "epoch": 2.897777777777778, | |
| "grad_norm": 317168.25, | |
| "learning_rate": 1.934156378600823e-06, | |
| "loss": 0.5962, | |
| "step": 1304 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "grad_norm": 456991.1875, | |
| "learning_rate": 1.8930041152263375e-06, | |
| "loss": 0.563, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 2.902222222222222, | |
| "grad_norm": 301806.40625, | |
| "learning_rate": 1.8518518518518519e-06, | |
| "loss": 0.5332, | |
| "step": 1306 | |
| }, | |
| { | |
| "epoch": 2.9044444444444446, | |
| "grad_norm": 334963.5, | |
| "learning_rate": 1.8106995884773662e-06, | |
| "loss": 0.5427, | |
| "step": 1307 | |
| }, | |
| { | |
| "epoch": 2.9066666666666667, | |
| "grad_norm": 1178584.125, | |
| "learning_rate": 1.7695473251028806e-06, | |
| "loss": 0.4654, | |
| "step": 1308 | |
| }, | |
| { | |
| "epoch": 2.908888888888889, | |
| "grad_norm": 538667.8125, | |
| "learning_rate": 1.728395061728395e-06, | |
| "loss": 0.5941, | |
| "step": 1309 | |
| }, | |
| { | |
| "epoch": 2.911111111111111, | |
| "grad_norm": 251747.875, | |
| "learning_rate": 1.6872427983539098e-06, | |
| "loss": 0.6021, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 2.913333333333333, | |
| "grad_norm": 1805239.0, | |
| "learning_rate": 1.6460905349794242e-06, | |
| "loss": 0.5335, | |
| "step": 1311 | |
| }, | |
| { | |
| "epoch": 2.9155555555555557, | |
| "grad_norm": 142086.625, | |
| "learning_rate": 1.6049382716049385e-06, | |
| "loss": 0.4195, | |
| "step": 1312 | |
| }, | |
| { | |
| "epoch": 2.917777777777778, | |
| "grad_norm": 805839.0, | |
| "learning_rate": 1.563786008230453e-06, | |
| "loss": 0.4475, | |
| "step": 1313 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "grad_norm": 714949.875, | |
| "learning_rate": 1.5226337448559673e-06, | |
| "loss": 0.5176, | |
| "step": 1314 | |
| }, | |
| { | |
| "epoch": 2.9222222222222225, | |
| "grad_norm": 359683.90625, | |
| "learning_rate": 1.4814814814814817e-06, | |
| "loss": 0.4859, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 2.924444444444444, | |
| "grad_norm": 775748.875, | |
| "learning_rate": 1.440329218106996e-06, | |
| "loss": 0.6062, | |
| "step": 1316 | |
| }, | |
| { | |
| "epoch": 2.9266666666666667, | |
| "grad_norm": 826315.4375, | |
| "learning_rate": 1.3991769547325104e-06, | |
| "loss": 0.6138, | |
| "step": 1317 | |
| }, | |
| { | |
| "epoch": 2.928888888888889, | |
| "grad_norm": 377858.5625, | |
| "learning_rate": 1.3580246913580248e-06, | |
| "loss": 0.5518, | |
| "step": 1318 | |
| }, | |
| { | |
| "epoch": 2.931111111111111, | |
| "grad_norm": 3151661.0, | |
| "learning_rate": 1.3168724279835392e-06, | |
| "loss": 0.5037, | |
| "step": 1319 | |
| }, | |
| { | |
| "epoch": 2.9333333333333336, | |
| "grad_norm": 1737743.75, | |
| "learning_rate": 1.2757201646090535e-06, | |
| "loss": 0.5886, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 2.9355555555555557, | |
| "grad_norm": 281431.375, | |
| "learning_rate": 1.234567901234568e-06, | |
| "loss": 0.4492, | |
| "step": 1321 | |
| }, | |
| { | |
| "epoch": 2.937777777777778, | |
| "grad_norm": 572928.0, | |
| "learning_rate": 1.1934156378600825e-06, | |
| "loss": 0.7053, | |
| "step": 1322 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "grad_norm": 2890382.0, | |
| "learning_rate": 1.1522633744855969e-06, | |
| "loss": 0.7321, | |
| "step": 1323 | |
| }, | |
| { | |
| "epoch": 2.942222222222222, | |
| "grad_norm": 365583.78125, | |
| "learning_rate": 1.1111111111111112e-06, | |
| "loss": 0.5227, | |
| "step": 1324 | |
| }, | |
| { | |
| "epoch": 2.9444444444444446, | |
| "grad_norm": 586235.625, | |
| "learning_rate": 1.0699588477366256e-06, | |
| "loss": 0.4605, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 2.9466666666666668, | |
| "grad_norm": 525312.6875, | |
| "learning_rate": 1.02880658436214e-06, | |
| "loss": 0.5955, | |
| "step": 1326 | |
| }, | |
| { | |
| "epoch": 2.948888888888889, | |
| "grad_norm": 1755663.5, | |
| "learning_rate": 9.876543209876544e-07, | |
| "loss": 0.6344, | |
| "step": 1327 | |
| }, | |
| { | |
| "epoch": 2.951111111111111, | |
| "grad_norm": 235478.671875, | |
| "learning_rate": 9.465020576131687e-07, | |
| "loss": 0.617, | |
| "step": 1328 | |
| }, | |
| { | |
| "epoch": 2.953333333333333, | |
| "grad_norm": 1451552.75, | |
| "learning_rate": 9.053497942386831e-07, | |
| "loss": 0.5483, | |
| "step": 1329 | |
| }, | |
| { | |
| "epoch": 2.9555555555555557, | |
| "grad_norm": 696397.1875, | |
| "learning_rate": 8.641975308641975e-07, | |
| "loss": 0.4778, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 2.957777777777778, | |
| "grad_norm": 415077.21875, | |
| "learning_rate": 8.230452674897121e-07, | |
| "loss": 0.632, | |
| "step": 1331 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "grad_norm": 345612.71875, | |
| "learning_rate": 7.818930041152265e-07, | |
| "loss": 0.4817, | |
| "step": 1332 | |
| }, | |
| { | |
| "epoch": 2.962222222222222, | |
| "grad_norm": 1004227.3125, | |
| "learning_rate": 7.407407407407408e-07, | |
| "loss": 0.4939, | |
| "step": 1333 | |
| }, | |
| { | |
| "epoch": 2.964444444444444, | |
| "grad_norm": 647075.375, | |
| "learning_rate": 6.995884773662552e-07, | |
| "loss": 0.5492, | |
| "step": 1334 | |
| }, | |
| { | |
| "epoch": 2.966666666666667, | |
| "grad_norm": 877828.8125, | |
| "learning_rate": 6.584362139917696e-07, | |
| "loss": 0.6042, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 2.968888888888889, | |
| "grad_norm": 938837.3125, | |
| "learning_rate": 6.17283950617284e-07, | |
| "loss": 0.4177, | |
| "step": 1336 | |
| }, | |
| { | |
| "epoch": 2.971111111111111, | |
| "grad_norm": 793827.875, | |
| "learning_rate": 5.761316872427984e-07, | |
| "loss": 0.3873, | |
| "step": 1337 | |
| }, | |
| { | |
| "epoch": 2.9733333333333336, | |
| "grad_norm": 594568.5, | |
| "learning_rate": 5.349794238683128e-07, | |
| "loss": 0.4783, | |
| "step": 1338 | |
| }, | |
| { | |
| "epoch": 2.9755555555555553, | |
| "grad_norm": 496043.03125, | |
| "learning_rate": 4.938271604938272e-07, | |
| "loss": 0.6735, | |
| "step": 1339 | |
| }, | |
| { | |
| "epoch": 2.977777777777778, | |
| "grad_norm": 662431.375, | |
| "learning_rate": 4.5267489711934156e-07, | |
| "loss": 0.5808, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "grad_norm": 308026.96875, | |
| "learning_rate": 4.1152263374485604e-07, | |
| "loss": 0.5529, | |
| "step": 1341 | |
| }, | |
| { | |
| "epoch": 2.982222222222222, | |
| "grad_norm": 1318756.75, | |
| "learning_rate": 3.703703703703704e-07, | |
| "loss": 0.4589, | |
| "step": 1342 | |
| }, | |
| { | |
| "epoch": 2.9844444444444447, | |
| "grad_norm": 401215.625, | |
| "learning_rate": 3.292181069958848e-07, | |
| "loss": 0.4334, | |
| "step": 1343 | |
| }, | |
| { | |
| "epoch": 2.986666666666667, | |
| "grad_norm": 398647.75, | |
| "learning_rate": 2.880658436213992e-07, | |
| "loss": 0.4161, | |
| "step": 1344 | |
| }, | |
| { | |
| "epoch": 2.988888888888889, | |
| "grad_norm": 701269.9375, | |
| "learning_rate": 2.469135802469136e-07, | |
| "loss": 0.5187, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 2.991111111111111, | |
| "grad_norm": 816582.25, | |
| "learning_rate": 2.0576131687242802e-07, | |
| "loss": 0.7391, | |
| "step": 1346 | |
| }, | |
| { | |
| "epoch": 2.993333333333333, | |
| "grad_norm": 566105.1875, | |
| "learning_rate": 1.646090534979424e-07, | |
| "loss": 0.5438, | |
| "step": 1347 | |
| }, | |
| { | |
| "epoch": 2.9955555555555557, | |
| "grad_norm": 345409.1875, | |
| "learning_rate": 1.234567901234568e-07, | |
| "loss": 0.5929, | |
| "step": 1348 | |
| }, | |
| { | |
| "epoch": 2.997777777777778, | |
| "grad_norm": 576835.75, | |
| "learning_rate": 8.23045267489712e-08, | |
| "loss": 0.3647, | |
| "step": 1349 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 242567.28125, | |
| "learning_rate": 4.11522633744856e-08, | |
| "loss": 0.465, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 1350, | |
| "total_flos": 5.087834939588608e+17, | |
| "train_loss": 0.5604817596409056, | |
| "train_runtime": 51124.8319, | |
| "train_samples_per_second": 0.422, | |
| "train_steps_per_second": 0.026 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 1350, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 5.087834939588608e+17, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
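
The object above appears to be the `trainer_state.json` checkpoint state written by the Hugging Face `transformers` Trainer: a `log_history` array with one record per optimizer step (`loss`, `grad_norm`, `learning_rate`, with `logging_steps` set to 1), closed out by run-level aggregates (`train_loss`, `train_runtime`, `total_flos`) and trainer metadata. A minimal sketch of consuming it, assuming the JSON is saved locally as `trainer_state.json` (the filename is an assumption) and using only the standard library:

```python
# Sketch: summarize a Hugging Face trainer_state.json per epoch.
# Assumes the JSON object above is saved as "trainer_state.json" (hypothetical path).
import json
from collections import defaultdict
from statistics import mean

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry "loss"; the final log_history entry instead holds
# run aggregates (train_loss, train_runtime, ...) and is filtered out here.
steps = [r for r in state["log_history"] if "loss" in r]
print(f"{len(steps)} of {state['max_steps']} steps logged; "
      f"reported train_loss = {state['log_history'][-1]['train_loss']:.4f}")

# Bucket records by integer epoch; the last step reports epoch == 3.0,
# so it is clamped into the final bucket.
by_epoch = defaultdict(list)
for r in steps:
    by_epoch[min(int(r["epoch"]), state["num_train_epochs"] - 1)].append(r)

for epoch in sorted(by_epoch):
    losses = [r["loss"] for r in by_epoch[epoch]]
    norms = [r["grad_norm"] for r in by_epoch[epoch]]
    print(f"epoch {epoch}: mean loss {mean(losses):.4f}, "
          f"grad_norm range {min(norms):.3g}..{max(norms):.3g}")
```

The gradient norms in the section above swing over roughly two orders of magnitude (from about 8.3e4 at step 1291 to about 8.6e6 at step 1118), so a per-epoch range summary like the one printed here tends to be more informative than a single mean.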