|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.996907854050711,
  "eval_steps": 500,
  "global_step": 505,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.01, "learning_rate": 1.25e-06, "loss": 1.9805, "step": 1},
    {"epoch": 0.02, "learning_rate": 2.5e-06, "loss": 1.9742, "step": 2},
    {"epoch": 0.03, "learning_rate": 3.7500000000000005e-06, "loss": 1.9398, "step": 3},
    {"epoch": 0.04, "learning_rate": 5e-06, "loss": 1.8268, "step": 4},
    {"epoch": 0.05, "learning_rate": 6.25e-06, "loss": 1.9396, "step": 5},
    {"epoch": 0.06, "learning_rate": 7.500000000000001e-06, "loss": 1.8366, "step": 6},
    {"epoch": 0.07, "learning_rate": 8.750000000000001e-06, "loss": 1.7488, "step": 7},
    {"epoch": 0.08, "learning_rate": 1e-05, "loss": 1.8458, "step": 8},
    {"epoch": 0.09, "learning_rate": 1.125e-05, "loss": 1.818, "step": 9},
    {"epoch": 0.1, "learning_rate": 1.25e-05, "loss": 1.7825, "step": 10},
    {"epoch": 0.11, "learning_rate": 1.375e-05, "loss": 1.8167, "step": 11},
    {"epoch": 0.12, "learning_rate": 1.5000000000000002e-05, "loss": 1.6685, "step": 12},
    {"epoch": 0.13, "learning_rate": 1.6250000000000002e-05, "loss": 1.6598, "step": 13},
    {"epoch": 0.14, "learning_rate": 1.7500000000000002e-05, "loss": 1.6377, "step": 14},
    {"epoch": 0.15, "learning_rate": 1.8750000000000002e-05, "loss": 1.7032, "step": 15},
    {"epoch": 0.16, "learning_rate": 2e-05, "loss": 1.7164, "step": 16},
    {"epoch": 0.17, "learning_rate": 1.9999793628111833e-05, "loss": 1.6622, "step": 17},
    {"epoch": 0.18, "learning_rate": 1.9999174520965194e-05, "loss": 1.6692, "step": 18},
    {"epoch": 0.19, "learning_rate": 1.999814270411335e-05, "loss": 1.6883, "step": 19},
    {"epoch": 0.2, "learning_rate": 1.99966982201439e-05, "loss": 1.698, "step": 20},
    {"epoch": 0.21, "learning_rate": 1.999484112867702e-05, "loss": 1.7371, "step": 21},
    {"epoch": 0.22, "learning_rate": 1.9992571506362997e-05, "loss": 1.6785, "step": 22},
    {"epoch": 0.23, "learning_rate": 1.9989889446879092e-05, "loss": 1.6484, "step": 23},
    {"epoch": 0.24, "learning_rate": 1.9986795060925636e-05, "loss": 1.6336, "step": 24},
    {"epoch": 0.25, "learning_rate": 1.9983288476221482e-05, "loss": 1.5657, "step": 25},
    {"epoch": 0.26, "learning_rate": 1.997936983749873e-05, "loss": 1.6443, "step": 26},
    {"epoch": 0.27, "learning_rate": 1.997503930649676e-05, "loss": 1.6247, "step": 27},
    {"epoch": 0.28, "learning_rate": 1.9970297061955533e-05, "loss": 1.5329, "step": 28},
    {"epoch": 0.29, "learning_rate": 1.9965143299608253e-05, "loss": 1.6595, "step": 29},
    {"epoch": 0.3, "learning_rate": 1.995957823217325e-05, "loss": 1.4952, "step": 30},
    {"epoch": 0.31, "learning_rate": 1.9953602089345215e-05, "loss": 1.5722, "step": 31},
    {"epoch": 0.32, "learning_rate": 1.9947215117785727e-05, "loss": 1.5964, "step": 32},
    {"epoch": 0.33, "learning_rate": 1.9940417581113062e-05, "loss": 1.6671, "step": 33},
    {"epoch": 0.34, "learning_rate": 1.9933209759891318e-05, "loss": 1.6731, "step": 34},
    {"epoch": 0.35, "learning_rate": 1.9925591951618822e-05, "loss": 1.5952, "step": 35},
    {"epoch": 0.36, "learning_rate": 1.9917564470715876e-05, "loss": 1.6384, "step": 36},
    {"epoch": 0.37, "learning_rate": 1.9909127648511758e-05, "loss": 1.5503, "step": 37},
    {"epoch": 0.38, "learning_rate": 1.990028183323105e-05, "loss": 1.731, "step": 38},
    {"epoch": 0.39, "learning_rate": 1.989102738997928e-05, "loss": 1.68, "step": 39},
    {"epoch": 0.4, "learning_rate": 1.9881364700727827e-05, "loss": 1.6194, "step": 40},
    {"epoch": 0.41, "learning_rate": 1.9871294164298175e-05, "loss": 1.6532, "step": 41},
    {"epoch": 0.42, "learning_rate": 1.986081619634545e-05, "loss": 1.7531, "step": 42},
    {"epoch": 0.43, "learning_rate": 1.9849931229341258e-05, "loss": 1.6211, "step": 43},
    {"epoch": 0.44, "learning_rate": 1.9838639712555842e-05, "loss": 1.6807, "step": 44},
    {"epoch": 0.45, "learning_rate": 1.982694211203952e-05, "loss": 1.7175, "step": 45},
    {"epoch": 0.46, "learning_rate": 1.981483891060348e-05, "loss": 1.6584, "step": 46},
    {"epoch": 0.47, "learning_rate": 1.9802330607799832e-05, "loss": 1.5696, "step": 47},
    {"epoch": 0.47, "learning_rate": 1.978941771990098e-05, "loss": 1.6854, "step": 48},
    {"epoch": 0.48, "learning_rate": 1.9776100779878344e-05, "loss": 1.5761, "step": 49},
    {"epoch": 0.49, "learning_rate": 1.976238033738033e-05, "loss": 1.6526, "step": 50},
    {"epoch": 0.5, "learning_rate": 1.9748256958709666e-05, "loss": 1.5919, "step": 51},
    {"epoch": 0.51, "learning_rate": 1.9733731226800016e-05, "loss": 1.5532, "step": 52},
    {"epoch": 0.52, "learning_rate": 1.9718803741191918e-05, "loss": 1.6093, "step": 53},
    {"epoch": 0.53, "learning_rate": 1.970347511800806e-05, "loss": 1.6669, "step": 54},
    {"epoch": 0.54, "learning_rate": 1.9687745989927823e-05, "loss": 1.594, "step": 55},
    {"epoch": 0.55, "learning_rate": 1.967161700616117e-05, "loss": 1.5944, "step": 56},
    {"epoch": 0.56, "learning_rate": 1.965508883242188e-05, "loss": 1.6027, "step": 57},
    {"epoch": 0.57, "learning_rate": 1.9638162150900028e-05, "loss": 1.5561, "step": 58},
    {"epoch": 0.58, "learning_rate": 1.9620837660233866e-05, "loss": 1.6239, "step": 59},
    {"epoch": 0.59, "learning_rate": 1.960311607548096e-05, "loss": 1.6061, "step": 60},
    {"epoch": 0.6, "learning_rate": 1.9584998128088686e-05, "loss": 1.5782, "step": 61},
    {"epoch": 0.61, "learning_rate": 1.9566484565864056e-05, "loss": 1.7248, "step": 62},
    {"epoch": 0.62, "learning_rate": 1.954757615294283e-05, "loss": 1.5613, "step": 63},
    {"epoch": 0.63, "learning_rate": 1.9528273669757974e-05, "loss": 1.7137, "step": 64},
    {"epoch": 0.64, "learning_rate": 1.9508577913007475e-05, "loss": 1.5516, "step": 65},
    {"epoch": 0.65, "learning_rate": 1.9488489695621432e-05, "loss": 1.5342, "step": 66},
    {"epoch": 0.66, "learning_rate": 1.9468009846728515e-05, "loss": 1.519, "step": 67},
    {"epoch": 0.67, "learning_rate": 1.944713921162174e-05, "loss": 1.5515, "step": 68},
    {"epoch": 0.68, "learning_rate": 1.942587865172359e-05, "loss": 1.5703, "step": 69},
    {"epoch": 0.69, "learning_rate": 1.9404229044550432e-05, "loss": 1.5526, "step": 70},
    {"epoch": 0.7, "learning_rate": 1.9382191283676336e-05, "loss": 1.5859, "step": 71},
    {"epoch": 0.71, "learning_rate": 1.9359766278696165e-05, "loss": 1.5213, "step": 72},
    {"epoch": 0.72, "learning_rate": 1.9336954955188042e-05, "loss": 1.6142, "step": 73},
    {"epoch": 0.73, "learning_rate": 1.9313758254675143e-05, "loss": 1.5944, "step": 74},
    {"epoch": 0.74, "learning_rate": 1.929017713458685e-05, "loss": 1.4959, "step": 75},
    {"epoch": 0.75, "learning_rate": 1.9266212568219223e-05, "loss": 1.494, "step": 76},
    {"epoch": 0.76, "learning_rate": 1.9241865544694817e-05, "loss": 1.5576, "step": 77},
    {"epoch": 0.77, "learning_rate": 1.9217137068921875e-05, "loss": 1.576, "step": 78},
    {"epoch": 0.78, "learning_rate": 1.9192028161552848e-05, "loss": 1.6083, "step": 79},
    {"epoch": 0.79, "learning_rate": 1.9166539858942258e-05, "loss": 1.5894, "step": 80},
    {"epoch": 0.8, "learning_rate": 1.9140673213103932e-05, "loss": 1.678, "step": 81},
    {"epoch": 0.81, "learning_rate": 1.9114429291667583e-05, "loss": 1.5729, "step": 82},
    {"epoch": 0.82, "learning_rate": 1.908780917783473e-05, "loss": 1.6561, "step": 83},
    {"epoch": 0.83, "learning_rate": 1.906081397033401e-05, "loss": 1.618, "step": 84},
    {"epoch": 0.84, "learning_rate": 1.9033444783375806e-05, "loss": 1.5488, "step": 85},
    {"epoch": 0.85, "learning_rate": 1.9005702746606274e-05, "loss": 1.5514, "step": 86},
    {"epoch": 0.86, "learning_rate": 1.8977589005060723e-05, "loss": 1.47, "step": 87},
    {"epoch": 0.87, "learning_rate": 1.8949104719116334e-05, "loss": 1.5237, "step": 88},
    {"epoch": 0.88, "learning_rate": 1.8920251064444284e-05, "loss": 1.5974, "step": 89},
    {"epoch": 0.89, "learning_rate": 1.8891029231961208e-05, "loss": 1.5083, "step": 90},
    {"epoch": 0.9, "learning_rate": 1.886144042778006e-05, "loss": 1.6104, "step": 91},
    {"epoch": 0.91, "learning_rate": 1.8831485873160312e-05, "loss": 1.5195, "step": 92},
    {"epoch": 0.92, "learning_rate": 1.880116680445757e-05, "loss": 1.619, "step": 93},
    {"epoch": 0.93, "learning_rate": 1.8770484473072518e-05, "loss": 1.5151, "step": 94},
    {"epoch": 0.94, "learning_rate": 1.8739440145399295e-05, "loss": 1.6652, "step": 95},
    {"epoch": 0.95, "learning_rate": 1.8708035102773198e-05, "loss": 1.5574, "step": 96},
    {"epoch": 0.96, "learning_rate": 1.8676270641417824e-05, "loss": 1.6027, "step": 97},
    {"epoch": 0.97, "learning_rate": 1.864414807239154e-05, "loss": 1.6076, "step": 98},
    {"epoch": 0.98, "learning_rate": 1.861166872153339e-05, "loss": 1.65, "step": 99},
    {"epoch": 0.99, "learning_rate": 1.857883392940837e-05, "loss": 1.5535, "step": 100},
    {"epoch": 1.0, "learning_rate": 1.8545645051252094e-05, "loss": 1.4654, "step": 101},
    {"epoch": 1.01, "learning_rate": 1.851210345691484e-05, "loss": 1.6053, "step": 102},
    {"epoch": 1.02, "learning_rate": 1.847821053080505e-05, "loss": 1.5897, "step": 103},
    {"epoch": 1.03, "learning_rate": 1.844396767183215e-05, "loss": 1.5679, "step": 104},
    {"epoch": 1.04, "learning_rate": 1.8409376293348836e-05, "loss": 1.4711, "step": 105},
    {"epoch": 1.05, "learning_rate": 1.8374437823092726e-05, "loss": 1.6241, "step": 106},
    {"epoch": 1.06, "learning_rate": 1.833915370312743e-05, "loss": 1.5392, "step": 107},
    {"epoch": 1.07, "learning_rate": 1.8303525389783045e-05, "loss": 1.456, "step": 108},
    {"epoch": 1.08, "learning_rate": 1.8267554353596027e-05, "loss": 1.5794, "step": 109},
    {"epoch": 1.09, "learning_rate": 1.8231242079248512e-05, "loss": 1.5491, "step": 110},
    {"epoch": 1.1, "learning_rate": 1.819459006550702e-05, "loss": 1.5011, "step": 111},
    {"epoch": 1.11, "learning_rate": 1.815759982516061e-05, "loss": 1.5362, "step": 112},
    {"epoch": 1.12, "learning_rate": 1.812027288495843e-05, "loss": 1.363, "step": 113},
    {"epoch": 1.13, "learning_rate": 1.808261078554671e-05, "loss": 1.3702, "step": 114},
    {"epoch": 1.14, "learning_rate": 1.8044615081405153e-05, "loss": 1.3961, "step": 115},
    {"epoch": 1.15, "learning_rate": 1.8006287340782807e-05, "loss": 1.4011, "step": 116},
    {"epoch": 1.16, "learning_rate": 1.7967629145633312e-05, "loss": 1.5261, "step": 117},
    {"epoch": 1.17, "learning_rate": 1.7928642091549616e-05, "loss": 1.3632, "step": 118},
    {"epoch": 1.18, "learning_rate": 1.7889327787698105e-05, "loss": 1.4158, "step": 119},
    {"epoch": 1.19, "learning_rate": 1.784968785675221e-05, "loss": 1.4353, "step": 120},
    {"epoch": 1.2, "learning_rate": 1.7809723934825405e-05, "loss": 1.4338, "step": 121},
    {"epoch": 1.21, "learning_rate": 1.77694376714037e-05, "loss": 1.4254, "step": 122},
    {"epoch": 1.22, "learning_rate": 1.772883072927754e-05, "loss": 1.4691, "step": 123},
    {"epoch": 1.23, "learning_rate": 1.768790478447319e-05, "loss": 1.3882, "step": 124},
    {"epoch": 1.24, "learning_rate": 1.764666152618355e-05, "loss": 1.388, "step": 125},
    {"epoch": 1.25, "learning_rate": 1.7605102656698444e-05, "loss": 1.3245, "step": 126},
    {"epoch": 1.26, "learning_rate": 1.756322989133434e-05, "loss": 1.4139, "step": 127},
    {"epoch": 1.27, "learning_rate": 1.7521044958363567e-05, "loss": 1.3521, "step": 128},
    {"epoch": 1.28, "learning_rate": 1.7478549598942983e-05, "loss": 1.2939, "step": 129},
    {"epoch": 1.29, "learning_rate": 1.7435745567042096e-05, "loss": 1.3955, "step": 130},
    {"epoch": 1.3, "learning_rate": 1.7392634629370684e-05, "loss": 1.2642, "step": 131},
    {"epoch": 1.31, "learning_rate": 1.734921856530587e-05, "loss": 1.3465, "step": 132},
    {"epoch": 1.32, "learning_rate": 1.730549916681868e-05, "loss": 1.3436, "step": 133},
    {"epoch": 1.33, "learning_rate": 1.726147823840007e-05, "loss": 1.401, "step": 134},
    {"epoch": 1.34, "learning_rate": 1.7217157596986474e-05, "loss": 1.4414, "step": 135},
    {"epoch": 1.35, "learning_rate": 1.717253907188477e-05, "loss": 1.3111, "step": 136},
    {"epoch": 1.36, "learning_rate": 1.7127624504696824e-05, "loss": 1.4064, "step": 137},
    {"epoch": 1.37, "learning_rate": 1.7082415749243436e-05, "loss": 1.3095, "step": 138},
    {"epoch": 1.38, "learning_rate": 1.7036914671487854e-05, "loss": 1.4675, "step": 139},
    {"epoch": 1.39, "learning_rate": 1.699112314945874e-05, "loss": 1.4249, "step": 140},
    {"epoch": 1.4, "learning_rate": 1.694504307317267e-05, "loss": 1.3799, "step": 141},
    {"epoch": 1.41, "learning_rate": 1.689867634455612e-05, "loss": 1.4039, "step": 142},
    {"epoch": 1.41, "learning_rate": 1.6852024877366945e-05, "loss": 1.5129, "step": 143},
    {"epoch": 1.42, "learning_rate": 1.6805090597115424e-05, "loss": 1.4277, "step": 144},
    {"epoch": 1.43, "learning_rate": 1.675787544098477e-05, "loss": 1.4467, "step": 145},
    {"epoch": 1.44, "learning_rate": 1.6710381357751155e-05, "loss": 1.4491, "step": 146},
    {"epoch": 1.45, "learning_rate": 1.6662610307703318e-05, "loss": 1.4455, "step": 147},
    {"epoch": 1.46, "learning_rate": 1.661456426256161e-05, "loss": 1.3074, "step": 148},
    {"epoch": 1.47, "learning_rate": 1.6566245205396647e-05, "loss": 1.4863, "step": 149},
    {"epoch": 1.48, "learning_rate": 1.6517655130547435e-05, "loss": 1.3705, "step": 150},
    {"epoch": 1.49, "learning_rate": 1.6468796043539082e-05, "loss": 1.4302, "step": 151},
    {"epoch": 1.5, "learning_rate": 1.641966996099999e-05, "loss": 1.3508, "step": 152},
    {"epoch": 1.51, "learning_rate": 1.6370278910578644e-05, "loss": 1.3224, "step": 153},
    {"epoch": 1.52, "learning_rate": 1.6320624930859905e-05, "loss": 1.3957, "step": 154},
    {"epoch": 1.53, "learning_rate": 1.627071007128089e-05, "loss": 1.4123, "step": 155},
    {"epoch": 1.54, "learning_rate": 1.6220536392046357e-05, "loss": 1.3938, "step": 156},
    {"epoch": 1.55, "learning_rate": 1.6170105964043698e-05, "loss": 1.3844, "step": 157},
    {"epoch": 1.56, "learning_rate": 1.6119420868757433e-05, "loss": 1.3879, "step": 158},
    {"epoch": 1.57, "learning_rate": 1.606848319818333e-05, "loss": 1.3161, "step": 159},
    {"epoch": 1.58, "learning_rate": 1.6017295054742045e-05, "loss": 1.4198, "step": 160},
    {"epoch": 1.59, "learning_rate": 1.596585855119233e-05, "loss": 1.3897, "step": 161},
    {"epoch": 1.6, "learning_rate": 1.5914175810543868e-05, "loss": 1.3397, "step": 162},
    {"epoch": 1.61, "learning_rate": 1.5862248965969604e-05, "loss": 1.4773, "step": 163},
    {"epoch": 1.62, "learning_rate": 1.5810080160717737e-05, "loss": 1.3474, "step": 164},
    {"epoch": 1.63, "learning_rate": 1.575767154802323e-05, "loss": 1.5057, "step": 165},
    {"epoch": 1.64, "learning_rate": 1.570502529101896e-05, "loss": 1.3343, "step": 166},
    {"epoch": 1.65, "learning_rate": 1.5652143562646416e-05, "loss": 1.3129, "step": 167},
    {"epoch": 1.66, "learning_rate": 1.5599028545566028e-05, "loss": 1.2845, "step": 168},
    {"epoch": 1.67, "learning_rate": 1.5545682432067068e-05, "loss": 1.337, "step": 169},
    {"epoch": 1.68, "learning_rate": 1.5492107423977167e-05, "loss": 1.3302, "step": 170},
    {"epoch": 1.69, "learning_rate": 1.5438305732571445e-05, "loss": 1.348, "step": 171},
    {"epoch": 1.7, "learning_rate": 1.5384279578481223e-05, "loss": 1.3215, "step": 172},
    {"epoch": 1.71, "learning_rate": 1.5330031191602395e-05, "loss": 1.3139, "step": 173},
    {"epoch": 1.72, "learning_rate": 1.5275562811003363e-05, "loss": 1.3987, "step": 174},
    {"epoch": 1.73, "learning_rate": 1.522087668483264e-05, "loss": 1.3642, "step": 175},
    {"epoch": 1.74, "learning_rate": 1.5165975070226045e-05, "loss": 1.3091, "step": 176},
    {"epoch": 1.75, "learning_rate": 1.5110860233213556e-05, "loss": 1.2608, "step": 177},
    {"epoch": 1.76, "learning_rate": 1.5055534448625766e-05, "loss": 1.3476, "step": 178},
    {"epoch": 1.77, "learning_rate": 1.5000000000000002e-05, "loss": 1.3632, "step": 179},
    {"epoch": 1.78, "learning_rate": 1.4944259179486068e-05, "loss": 1.3465, "step": 180},
    {"epoch": 1.79, "learning_rate": 1.488831428775164e-05, "loss": 1.3829, "step": 181},
    {"epoch": 1.8, "learning_rate": 1.4832167633887306e-05, "loss": 1.3986, "step": 182},
    {"epoch": 1.81, "learning_rate": 1.477582153531126e-05, "loss": 1.3538, "step": 183},
    {"epoch": 1.82, "learning_rate": 1.4719278317673655e-05, "loss": 1.4173, "step": 184},
    {"epoch": 1.83, "learning_rate": 1.4662540314760608e-05, "loss": 1.3762, "step": 185},
    {"epoch": 1.84, "learning_rate": 1.4605609868397874e-05, "loss": 1.3599, "step": 186},
    {"epoch": 1.85, "learning_rate": 1.4548489328354197e-05, "loss": 1.324, "step": 187},
    {"epoch": 1.86, "learning_rate": 1.4491181052244317e-05, "loss": 1.2417, "step": 188},
    {"epoch": 1.87, "learning_rate": 1.4433687405431663e-05, "loss": 1.2887, "step": 189},
    {"epoch": 1.88, "learning_rate": 1.437601076093073e-05, "loss": 1.3587, "step": 190},
    {"epoch": 1.89, "learning_rate": 1.4318153499309118e-05, "loss": 1.2952, "step": 191},
    {"epoch": 1.9, "learning_rate": 1.4260118008589294e-05, "loss": 1.3474, "step": 192},
    {"epoch": 1.91, "learning_rate": 1.420190668415002e-05, "loss": 1.348, "step": 193},
    {"epoch": 1.92, "learning_rate": 1.4143521928627479e-05, "loss": 1.3807, "step": 194},
    {"epoch": 1.93, "learning_rate": 1.4084966151816124e-05, "loss": 1.3044, "step": 195},
    {"epoch": 1.94, "learning_rate": 1.4026241770569198e-05, "loss": 1.4231, "step": 196},
    {"epoch": 1.95, "learning_rate": 1.3967351208698985e-05, "loss": 1.3206, "step": 197},
    {"epoch": 1.96, "learning_rate": 1.3908296896876778e-05, "loss": 1.3744, "step": 198},
    {"epoch": 1.97, "learning_rate": 1.3849081272532545e-05, "loss": 1.4214, "step": 199},
    {"epoch": 1.98, "learning_rate": 1.3789706779754326e-05, "loss": 1.4233, "step": 200},
    {"epoch": 1.99, "learning_rate": 1.373017586918736e-05, "loss": 1.3545, "step": 201},
    {"epoch": 2.0, "learning_rate": 1.3670490997932922e-05, "loss": 1.2513, "step": 202},
    {"epoch": 2.01, "learning_rate": 1.3610654629446938e-05, "loss": 1.3838, "step": 203},
    {"epoch": 2.02, "learning_rate": 1.3550669233438271e-05, "loss": 1.3655, "step": 204},
    {"epoch": 2.03, "learning_rate": 1.3490537285766809e-05, "loss": 1.3503, "step": 205},
    {"epoch": 2.04, "learning_rate": 1.3430261268341272e-05, "loss": 1.2948, "step": 206},
    {"epoch": 2.05, "learning_rate": 1.3369843669016757e-05, "loss": 1.3722, "step": 207},
    {"epoch": 2.06, "learning_rate": 1.3309286981492084e-05, "loss": 1.3155, "step": 208},
    {"epoch": 2.07, "learning_rate": 1.3248593705206838e-05, "loss": 1.2492, "step": 209},
    {"epoch": 2.08, "learning_rate": 1.3187766345238222e-05, "loss": 1.3089, "step": 210},
    {"epoch": 2.09, "learning_rate": 1.3126807412197666e-05, "loss": 1.3134, "step": 211},
    {"epoch": 2.1, "learning_rate": 1.3065719422127188e-05, "loss": 1.2995, "step": 212},
    {"epoch": 2.11, "learning_rate": 1.3004504896395564e-05, "loss": 1.3436, "step": 213},
    {"epoch": 2.12, "learning_rate": 1.2943166361594242e-05, "loss": 1.1327, "step": 214},
    {"epoch": 2.13, "learning_rate": 1.288170634943307e-05, "loss": 1.1199, "step": 215},
    {"epoch": 2.14, "learning_rate": 1.2820127396635802e-05, "loss": 1.1588, "step": 216},
    {"epoch": 2.15, "learning_rate": 1.275843204483539e-05, "loss": 1.1224, "step": 217},
    {"epoch": 2.16, "learning_rate": 1.2696622840469084e-05, "loss": 1.3147, "step": 218},
    {"epoch": 2.17, "learning_rate": 1.263470233467332e-05, "loss": 1.1618, "step": 219},
    {"epoch": 2.18, "learning_rate": 1.2572673083178448e-05, "loss": 1.195, "step": 220},
    {"epoch": 2.19, "learning_rate": 1.2510537646203209e-05, "loss": 1.1712, "step": 221},
    {"epoch": 2.2, "learning_rate": 1.2448298588349097e-05, "loss": 1.1937, "step": 222},
    {"epoch": 2.21, "learning_rate": 1.2385958478494487e-05, "loss": 1.192, "step": 223},
    {"epoch": 2.22, "learning_rate": 1.2323519889688615e-05, "loss": 1.2389, "step": 224},
    {"epoch": 2.23, "learning_rate": 1.2260985399045379e-05, "loss": 1.1535, "step": 225},
    {"epoch": 2.24, "learning_rate": 1.2198357587636958e-05, "loss": 1.1672, "step": 226},
    {"epoch": 2.25, "learning_rate": 1.2135639040387291e-05, "loss": 1.1308, "step": 227},
    {"epoch": 2.26, "learning_rate": 1.2072832345965381e-05, "loss": 1.1642, "step": 228},
    {"epoch": 2.27, "learning_rate": 1.2009940096678451e-05, "loss": 1.0826, "step": 229},
    {"epoch": 2.28, "learning_rate": 1.1946964888364949e-05, "loss": 1.0454, "step": 230},
    {"epoch": 2.29, "learning_rate": 1.1883909320287406e-05, "loss": 1.1272, "step": 231},
    {"epoch": 2.3, "learning_rate": 1.1820775995025147e-05, "loss": 1.0187, "step": 232},
    {"epoch": 2.31, "learning_rate": 1.1757567518366883e-05, "loss": 1.1232, "step": 233},
    {"epoch": 2.32, "learning_rate": 1.169428649920315e-05, "loss": 1.0917, "step": 234},
    {"epoch": 2.33, "learning_rate": 1.1630935549418627e-05, "loss": 1.1499, "step": 235},
    {"epoch": 2.34, "learning_rate": 1.1567517283784344e-05, "loss": 1.2013, "step": 236},
    {"epoch": 2.35, "learning_rate": 1.1504034319849741e-05, "loss": 1.0021, "step": 237},
    {"epoch": 2.35, "learning_rate": 1.1440489277834645e-05, "loss": 1.145, "step": 238},
    {"epoch": 2.36, "learning_rate": 1.1376884780521117e-05, "loss": 1.0523, "step": 239},
    {"epoch": 2.37, "learning_rate": 1.1313223453145202e-05, "loss": 1.199, "step": 240},
    {"epoch": 2.38, "learning_rate": 1.1249507923288563e-05, "loss": 1.1381, "step": 241},
    {"epoch": 2.39, "learning_rate": 1.1185740820770042e-05, "loss": 1.1056, "step": 242},
    {"epoch": 2.4, "learning_rate": 1.1121924777537108e-05, "loss": 1.152, "step": 243},
    {"epoch": 2.41, "learning_rate": 1.105806242755723e-05, "loss": 1.2463, "step": 244},
    {"epoch": 2.42, "learning_rate": 1.0994156406709155e-05, "loss": 1.1991, "step": 245},
    {"epoch": 2.43, "learning_rate": 1.0930209352674123e-05, "loss": 1.2074, "step": 246},
    {"epoch": 2.44, "learning_rate": 1.0866223904826992e-05, "loss": 1.2137, "step": 247},
    {"epoch": 2.45, "learning_rate": 1.0802202704127293e-05, "loss": 1.1762, "step": 248},
    {"epoch": 2.46, "learning_rate": 1.0738148393010251e-05, "loss": 1.1095, "step": 249},
    {"epoch": 2.47, "learning_rate": 1.0674063615277681e-05, "loss": 1.2061, "step": 250},
    {"epoch": 2.48, "learning_rate": 1.0609951015988907e-05, "loss": 1.1453, "step": 251},
    {"epoch": 2.49, "learning_rate": 1.054581324135156e-05, "loss": 1.1776, "step": 252},
    {"epoch": 2.5, "learning_rate": 1.0481652938612374e-05, "loss": 1.1084, "step": 253},
    {"epoch": 2.51, "learning_rate": 1.0417472755947908e-05, "loss": 1.1028, "step": 254},
    {"epoch": 2.52, "learning_rate": 1.0353275342355262e-05, "loss": 1.1285, "step": 255},
    {"epoch": 2.53, "learning_rate": 1.0289063347542727e-05, "loss": 1.1332, "step": 256},
    {"epoch": 2.54, "learning_rate": 1.0224839421820426e-05, "loss": 1.182, "step": 257},
    {"epoch": 2.55, "learning_rate": 1.0160606215990922e-05, "loss": 1.1823, "step": 258},
    {"epoch": 2.56, "learning_rate": 1.0096366381239808e-05, "loss": 1.14, "step": 259},
    {"epoch": 2.57, "learning_rate": 1.0032122569026284e-05, "loss": 1.0837, "step": 260},
    {"epoch": 2.58, "learning_rate": 9.967877430973716e-06, "loss": 1.1733, "step": 261},
    {"epoch": 2.59, "learning_rate": 9.903633618760195e-06, "loss": 1.128, "step": 262},
    {"epoch": 2.6, "learning_rate": 9.839393784009078e-06, "loss": 1.1153, "step": 263},
    {"epoch": 2.61, "learning_rate": 9.775160578179575e-06, "loss": 1.2042, "step": 264},
    {"epoch": 2.62, "learning_rate": 9.710936652457276e-06, "loss": 1.1457, "step": 265},
    {"epoch": 2.63, "learning_rate": 9.64672465764474e-06, "loss": 1.2454, "step": 266},
    {"epoch": 2.64, "learning_rate": 9.582527244052095e-06, "loss": 1.078, "step": 267},
    {"epoch": 2.65, "learning_rate": 9.518347061387629e-06, "loss": 1.0835, "step": 268},
    {"epoch": 2.66, "learning_rate": 9.454186758648444e-06, "loss": 1.0497, "step": 269},
    {"epoch": 2.67, "learning_rate": 9.390048984011095e-06, "loss": 1.0864, "step": 270},
    {"epoch": 2.68, "learning_rate": 9.325936384722322e-06, "loss": 1.0472, "step": 271},
    {"epoch": 2.69, "learning_rate": 9.261851606989754e-06, "loss": 1.1284, "step": 272},
    {"epoch": 2.7, "learning_rate": 9.197797295872709e-06, "loss": 1.0829, "step": 273},
    {"epoch": 2.71, "learning_rate": 9.133776095173015e-06, "loss": 1.0973, "step": 274},
    {"epoch": 2.72, "learning_rate": 9.069790647325879e-06, "loss": 1.1343, "step": 275},
    {"epoch": 2.73, "learning_rate": 9.005843593290849e-06, "loss": 1.1389, "step": 276},
    {"epoch": 2.74, "learning_rate": 8.941937572442773e-06, "loss": 1.0836, "step": 277},
    {"epoch": 2.75, "learning_rate": 8.878075222462896e-06, "loss": 1.0652, "step": 278},
    {"epoch": 2.76, "learning_rate": 8.81425917922996e-06, "loss": 1.0596, "step": 279},
    {"epoch": 2.77, "learning_rate": 8.750492076711439e-06, "loss": 1.1175, "step": 280},
    {"epoch": 2.78, "learning_rate": 8.6867765468548e-06, "loss": 1.0994, "step": 281},
    {"epoch": 2.79, "learning_rate": 8.623115219478884e-06, "loss": 1.1334, "step": 282},
    {"epoch": 2.8, "learning_rate": 8.55951072216536e-06, "loss": 1.1725, "step": 283},
    {"epoch": 2.81, "learning_rate": 8.49596568015026e-06, "loss": 1.0953, "step": 284},
    {"epoch": 2.82, "learning_rate": 8.432482716215663e-06, "loss": 1.1551, "step": 285},
    {"epoch": 2.83, "learning_rate": 8.369064450581374e-06, "loss": 1.0854, "step": 286},
    {"epoch": 2.84, "learning_rate": 8.305713500796852e-06, "loss": 1.1419, "step": 287},
    {"epoch": 2.85, "learning_rate": 8.242432481633119e-06, "loss": 1.0436, "step": 288},
    {"epoch": 2.86, "learning_rate": 8.179224004974857e-06, "loss": 1.0331, "step": 289},
    {"epoch": 2.87, "learning_rate": 8.116090679712601e-06, "loss": 1.0594, "step": 290},
    {"epoch": 2.88, "learning_rate": 8.053035111635054e-06, "loss": 1.0856, "step": 291},
    {"epoch": 2.89, "learning_rate": 7.990059903321554e-06, "loss": 1.0206, "step": 292},
    {"epoch": 2.9, "learning_rate": 7.927167654034622e-06, "loss": 1.1111, "step": 293},
    {"epoch": 2.91, "learning_rate": 7.864360959612714e-06, "loss": 1.1125, "step": 294},
    {"epoch": 2.92, "learning_rate": 7.801642412363042e-06, "loss": 1.1747, "step": 295},
    {"epoch": 2.93, "learning_rate": 7.739014600954623e-06, "loss": 1.0466, "step": 296},
    {"epoch": 2.94, "learning_rate": 7.676480110311385e-06, "loss": 1.1687, "step": 297},
    {"epoch": 2.95, "learning_rate": 7.614041521505517e-06, "loss": 1.0808, "step": 298},
    {"epoch": 2.96, "learning_rate": 7.5517014116509094e-06, "loss": 1.1393, "step": 299},
    {"epoch": 2.97, "learning_rate": 7.489462353796792e-06, "loss": 1.1399, "step": 300},
    {"epoch": 2.98, "learning_rate": 7.427326916821557e-06, "loss": 1.1977, "step": 301},
    {"epoch": 2.99, "learning_rate": 7.3652976653266785e-06, "loss": 1.1136, "step": 302},
    {"epoch": 3.0, "learning_rate": 7.303377159530919e-06, "loss": 1.0574, "step": 303},
    {"epoch": 3.01, "learning_rate": 7.24156795516461e-06, "loss": 1.1441, "step": 304},
    {"epoch": 3.02, "learning_rate": 7.1798726033642e-06, "loss": 1.1542, "step": 305},
    {"epoch": 3.03, "learning_rate": 7.118293650566931e-06, "loss": 1.1092, "step": 306},
    {"epoch": 3.04, "learning_rate": 7.056833638405762e-06, "loss": 1.064, "step": 307},
    {"epoch": 3.05, "learning_rate": 6.995495103604442e-06, "loss": 1.1304, "step": 308},
    {"epoch": 3.06, "learning_rate": 6.934280577872814e-06, "loss": 1.0682, "step": 309},
    {"epoch": 3.07, "learning_rate": 6.87319258780234e-06, "loss": 1.0223, "step": 310},
    {"epoch": 3.08, "learning_rate": 6.812233654761779e-06, "loss": 1.0467, "step": 311},
    {"epoch": 3.09, "learning_rate": 6.7514062947931655e-06, "loss": 1.0722, "step": 312},
    {"epoch": 3.1, "learning_rate": 6.690713018507917e-06, "loss": 1.0569, "step": 313},
    {"epoch": 3.11, "learning_rate": 6.630156330983244e-06, "loss": 1.1766, "step": 314},
    {"epoch": 3.12, "learning_rate": 6.569738731658735e-06, "loss": 0.9828, "step": 315},
    {"epoch": 3.13, "learning_rate": 6.509462714233194e-06, "loss": 0.9987, "step": 316},
    {"epoch": 3.14, "learning_rate": 6.449330766561735e-06, "loss": 0.9584, "step": 317},
    {"epoch": 3.15, "learning_rate": 6.389345370553065e-06, "loss": 0.9931, "step": 318},
    {"epoch": 3.16, "learning_rate": 6.32950900206708e-06, "loss": 1.1554, "step": 319},
    {"epoch": 3.17, "learning_rate": 6.269824130812645e-06, "loss": 0.9724, "step": 320},
    {"epoch": 3.18, "learning_rate": 6.210293220245678e-06, "loss": 1.0328, "step": 321},
    {"epoch": 3.19, "learning_rate": 6.150918727467455e-06, "loss": 0.9994, "step": 322},
    {"epoch": 3.2, "learning_rate": 6.091703103123223e-06, "loss": 1.0123, "step": 323},
    {"epoch": 3.21, "learning_rate": 6.032648791301019e-06, "loss": 1.003, "step": 324},
    {"epoch": 3.22, "learning_rate": 5.973758229430806e-06, "loss": 1.0226, "step": 325},
    {"epoch": 3.23, "learning_rate": 5.91503384818388e-06, "loss": 0.9729, "step": 326},
    {"epoch": 3.24, "learning_rate": 5.856478071372521e-06, "loss": 0.9728, "step": 327},
    {"epoch": 3.25, "learning_rate": 5.798093315849984e-06, "loss": 0.9271, "step": 328},
    {"epoch": 3.26, "learning_rate": 5.739881991410707e-06, "loss": 0.962, "step": 329},
    {"epoch": 3.27, "learning_rate": 5.681846500690884e-06, "loss": 0.8697, "step": 330},
    {"epoch": 3.28, "learning_rate": 5.623989239069275e-06, "loss": 0.8456, "step": 331},
    {"epoch": 3.29, "learning_rate": 5.56631259456834e-06, "loss": 0.9068, "step": 332},
    {"epoch": 3.29, "learning_rate": 5.508818947755687e-06, "loss": 0.8098, "step": 333},
    {"epoch": 3.3, "learning_rate": 5.451510671645806e-06, "loss": 0.9104, "step": 334},
    {"epoch": 3.31, "learning_rate": 5.394390131602133e-06, "loss": 0.904, "step": 335},
    {"epoch": 3.32, "learning_rate": 5.337459685239395e-06, "loss": 0.9783, "step": 336},
    {"epoch": 3.33, "learning_rate": 5.280721682326349e-06, "loss": 1.0197, "step": 337},
    {"epoch": 3.34, "learning_rate": 5.224178464688742e-06, "loss": 0.8167, "step": 338},
    {"epoch": 3.35, "learning_rate": 5.167832366112695e-06, "loss": 0.9123, "step": 339},
    {"epoch": 3.36, "learning_rate": 5.111685712248364e-06, "loss": 0.8262, "step": 340},
    {"epoch": 3.37, "learning_rate": 5.055740820513932e-06, "loss": 1.0102, "step": 341},
    {"epoch": 3.38, "learning_rate": 5.000000000000003e-06, "loss": 0.9307, "step": 342},
    {"epoch": 3.39, "learning_rate": 4.944465551374238e-06, "loss": 0.8742, "step": 343},
    {"epoch": 3.4, "learning_rate": 4.889139766786447e-06, "loss": 0.9395, "step": 344},
    {"epoch": 3.41, "learning_rate": 4.834024929773956e-06, "loss": 1.0364, "step": 345},
    {"epoch": 3.42, "learning_rate": 4.779123315167362e-06, "loss": 1.0022, "step": 346},
    {"epoch": 3.43, "learning_rate": 4.7244371889966374e-06, "loss": 0.9957, "step": 347},
    {"epoch": 3.44, "learning_rate": 4.669968808397609e-06, "loss": 1.027, "step": 348},
    {"epoch": 3.45, "learning_rate": 4.61572042151878e-06, "loss": 0.9526, "step": 349},
    {"epoch": 3.46, "learning_rate": 4.56169426742856e-06, "loss": 0.9176, "step": 350},
    {"epoch": 3.47, "learning_rate": 4.507892576022838e-06, "loss": 1.0136, "step": 351},
    {"epoch": 3.48, "learning_rate": 4.4543175679329345e-06, "loss": 0.959, "step": 352},
    {"epoch": 3.49, "learning_rate": 4.4009714544339755e-06, "loss": 0.9847, "step": 353},
    {"epoch": 3.5, "learning_rate": 4.347856437353584e-06, "loss": 0.8823, "step": 354},
    {"epoch": 3.51, "learning_rate": 4.294974708981041e-06, "loss": 0.917, "step": 355},
    {"epoch": 3.52, "learning_rate": 4.242328451976774e-06, "loss": 0.9402, "step": 356},
    {"epoch": 3.53, "learning_rate": 4.189919839282265e-06, "loss": 0.9244, "step": 357},
    {"epoch": 3.54, "learning_rate": 4.1377510340304e-06, "loss": 1.0061, "step": 358},
    {"epoch": 3.55, "learning_rate": 4.085824189456136e-06, "loss": 0.9854, "step": 359},
    {"epoch": 3.56, "learning_rate": 4.03414144880767e-06, "loss": 0.9865, "step": 360},
    {"epoch": 3.57, "learning_rate": 3.982704945257957e-06, "loss": 0.9202, "step": 361},
    {"epoch": 3.58, "learning_rate": 3.931516801816668e-06, "loss": 0.971, "step": 362},
    {"epoch": 3.59, "learning_rate": 3.880579131242567e-06, "loss": 0.9601, "step": 363},
    {"epoch": 3.6, "learning_rate": 3.829894035956306e-06, "loss": 0.9382, "step": 364},
    {"epoch": 3.61, "learning_rate": 3.779463607953644e-06, "loss": 0.9866, "step": 365},
    {"epoch": 3.62, "learning_rate": 3.729289928719113e-06, "loss": 0.9524, "step": 366},
    {"epoch": 3.63, "learning_rate": 3.6793750691400996e-06, "loss": 1.0385, "step": 367},
    {"epoch": 3.64, "learning_rate": 3.62972108942136e-06, "loss": 0.8792, "step": 368},
    {"epoch": 3.65, "learning_rate": 3.580330039000014e-06, "loss": 0.9042, "step": 369},
    {"epoch": 3.66, "learning_rate": 3.5312039564609203e-06, "loss": 0.8561, "step": 370},
    {"epoch": 3.67, "learning_rate": 3.482344869452565e-06, "loss": 0.948, "step": 371},
    {"epoch": 3.68, "learning_rate": 3.4337547946033557e-06, "loss": 0.8488, "step": 372},
    {"epoch": 3.69, "learning_rate": 3.3854357374383905e-06, "loss": 0.9748, "step": 373},
    {"epoch": 3.7, "learning_rate": 3.3373896922966863e-06, "loss": 0.9182, "step": 374},
    {"epoch": 3.71, "learning_rate": 3.2896186422488463e-06, "loss": 0.9528, "step": 375},
    {"epoch": 3.72, "learning_rate": 3.242124559015234e-06, "loss": 0.9543, "step": 376},
    {"epoch": 3.73, "learning_rate": 3.194909402884576e-06, "loss": 0.9576, "step": 377},
    {"epoch": 3.74, "learning_rate": 3.1479751226330567e-06, "loss": 0.8864, "step": 378},
    {"epoch": 3.75, "learning_rate": 3.101323655443882e-06, "loss": 0.9, "step": 379},
    {"epoch": 3.76, "learning_rate": 3.0549569268273316e-06, "loss": 0.9028, "step": 380},
    {"epoch": 3.77, "learning_rate": 3.0088768505412623e-06, "loss": 0.9111, "step": 381},
    {"epoch": 3.78, "learning_rate": 2.9630853285121506e-06, "loss": 0.9345, "step": 382},
    {"epoch": 3.79, "learning_rate": 2.91758425075657e-06, "loss": 0.9312, "step": 383},
    {"epoch": 3.8, "learning_rate": 2.872375495303178e-06, "loss": 0.9943, "step": 384},
    {"epoch": 3.81, "learning_rate": 2.8274609281152322e-06, "loss": 0.9443, "step": 385},
    {"epoch": 3.82, "learning_rate": 2.7828424030135305e-06, "loss": 0.9603, "step": 386},
    {"epoch": 3.83, "learning_rate": 2.7385217615999303e-06, "loss": 0.9195, "step": 387},
    {"epoch": 3.84, "learning_rate": 2.694500833181323e-06, "loss": 0.9257, "step": 388},
    {"epoch": 3.85, "learning_rate": 2.65078143469413e-06, "loss": 0.8675, "step": 389},
    {"epoch": 3.86, "learning_rate": 2.6073653706293202e-06, "loss": 0.8377, "step": 390},
    {"epoch": 3.87, "learning_rate": 2.5642544329579088e-06, "loss": 0.8685, "step": 391},
    {"epoch": 3.88, "learning_rate": 2.5214504010570217e-06, "loss": 0.8838, "step": 392},
    {"epoch": 3.89, "learning_rate": 2.478955041636435e-06, "loss": 0.8548, "step": 393},
    {"epoch": 3.9, "learning_rate": 2.4367701086656625e-06, "loss": 0.9218, "step": 394},
    {"epoch": 3.91, "learning_rate": 2.3948973433015564e-06, "loss": 0.9963, "step": 395},
    {"epoch": 3.92, "learning_rate": 2.353338473816451e-06, "loss": 0.9655, "step": 396},
    {"epoch": 3.93, "learning_rate": 2.312095215526814e-06, "loss": 0.8899, "step": 397},
    {"epoch": 3.94, "learning_rate": 2.271169270722464e-06, "loss": 0.9778, "step": 398},
    {"epoch": 3.95, "learning_rate": 2.230562328596306e-06, "loss": 0.946, "step": 399},
    {"epoch": 3.96, "learning_rate": 2.190276065174596e-06, "loss": 0.9608, "step": 400},
    {"epoch": 3.97, "learning_rate": 2.1503121432477936e-06, "loss": 0.9379, "step": 401},
    {"epoch": 3.98, "learning_rate": 2.1106722123018965e-06, "loss": 1.0413, "step": 402},
    {"epoch": 3.99, "learning_rate": 2.0713579084503877e-06, "loss": 0.9567, "step": 403},
    {"epoch": 4.0, "learning_rate": 2.0323708543666888e-06, "loss": 0.8854, "step": 404},
    {"epoch": 4.01, "learning_rate": 1.993712659217194e-06, "loss": 0.952, "step": 405},
    {"epoch": 4.02, "learning_rate": 1.9553849185948514e-06, "loss": 1.0019, "step": 406},
    {"epoch": 4.03, "learning_rate": 1.9173892144532957e-06, "loss": 0.965, "step": 407},
    {"epoch": 4.04, "learning_rate": 1.8797271150415709e-06, "loss": 0.8878, "step": 408},
    {"epoch": 4.05, "learning_rate": 1.8424001748393905e-06, "loss": 0.9816, "step": 409},
    {"epoch": 4.06, "learning_rate": 1.8054099344929833e-06, "loss": 0.9631, "step": 410},
    {"epoch": 4.07, "learning_rate": 1.7687579207514893e-06, "loss": 0.8626, "step": 411},
    {"epoch": 4.08, "learning_rate": 1.7324456464039751e-06, "loss": 0.8982, "step": 412},
    {"epoch": 4.09, "learning_rate": 1.6964746102169582e-06, "loss": 0.9542, "step": 413},
    {"epoch": 4.1, "learning_rate": 1.6608462968725736e-06, "loss": 0.8928, "step": 414},
    {"epoch": 4.11, "learning_rate": 1.6255621769072805e-06, "loss": 1.0105, "step": 415},
    {"epoch": 4.12, "learning_rate": 1.5906237066511643e-06, "loss": 0.8821, "step": 416},
    {"epoch": 4.13, "learning_rate": 1.5560323281678514e-06, "loss": 0.8703, "step": 417},
    {"epoch": 4.14, "learning_rate": 1.521789469194952e-06, "loss": 0.8437, "step": 418},
    {"epoch": 4.15, "learning_rate": 1.4878965430851612e-06, "loss": 0.8663, "step": 419},
    {"epoch": 4.16, "learning_rate": 1.4543549487479092e-06, "loss": 0.9994, "step": 420},
    {"epoch": 4.17, "learning_rate": 1.4211660705916286e-06, "loss": 0.8591, "step": 421},
    {"epoch": 4.18, "learning_rate": 1.3883312784666091e-06, "loss": 0.8815, "step": 422},
    {"epoch": 4.19, "learning_rate": 1.3558519276084636e-06, "loss": 0.8615, "step": 423},
    {"epoch": 4.2, "learning_rate": 1.3237293585821786e-06, "loss": 0.8511, "step": 424},
    {"epoch": 4.21, "learning_rate": 1.291964897226803e-06, "loss": 0.8524, "step": 425},
    {"epoch": 4.22, "learning_rate": 1.260559854600709e-06, "loss": 0.8683, "step": 426},
    {"epoch": 4.23, "learning_rate": 1.2295155269274827e-06, "loss": 0.8299, "step": 427},
    {"epoch": 4.24, "learning_rate": 1.198833195542435e-06, "loss": 0.8564, "step": 428},
    {"epoch": 4.24, "learning_rate": 1.1685141268396906e-06, "loss": 0.7924, "step": 429},
    {"epoch": 4.25, "learning_rate": 1.1385595722199438e-06, "loss": 0.8414, "step": 430},
    {"epoch": 4.26, "learning_rate": 1.1089707680387962e-06, "loss": 0.7434, "step": 431},
    {"epoch": 4.27, "learning_rate": 1.079748935555719e-06, "loss": 0.7299, "step": 432},
    {"epoch": 4.28, "learning_rate": 1.0508952808836682e-06, "loss": 0.7797, "step": 433},
    {"epoch": 4.29, "learning_rate": 1.022410994939279e-06, "loss": 0.7147, "step": 434},
    {"epoch": 4.3, "learning_rate": 9.942972533937268e-07, "loss": 0.8071, "step": 435},
    {"epoch": 4.31, "learning_rate": 9.665552166241965e-07, "loss": 0.7704, "step": 436},
    {"epoch": 4.32, "learning_rate": 9.391860296659916e-07, "loss": 0.8572, "step": 437},
    {"epoch": 4.33, "learning_rate": 9.121908221652675e-07, "loss": 0.8579, "step": 438},
    {"epoch": 4.34, "learning_rate": 8.855707083324183e-07, "loss": 0.7123, "step": 439},
    {"epoch": 4.35, "learning_rate": 8.593267868960675e-07, "loss": 0.7775, "step": 440},
    {"epoch": 4.36, "learning_rate": 8.334601410577436e-07, "loss": 0.7357, "step": 441},
    {"epoch": 4.37, "learning_rate": 8.079718384471557e-07, "loss": 0.867, "step": 442},
    {"epoch": 4.38, "learning_rate": 7.828629310781266e-07, "loss": 0.821, "step": 443},
    {"epoch": 4.39, "learning_rate": 7.581344553051873e-07, "loss": 0.7604, "step": 444},
    {"epoch": 4.4, "learning_rate": 7.337874317807803e-07, "loss": 0.8181, "step": 445},
    {"epoch": 4.41, "learning_rate": 7.098228654131489e-07, "loss": 0.8672, "step": 446},
    {"epoch": 4.42, "learning_rate": 6.862417453248593e-07, "loss": 0.871, "step": 447},
    {"epoch": 4.43, "learning_rate": 6.630450448119618e-07, "loss": 0.8699, "step": 448},
    {"epoch": 4.44, "learning_rate": 6.402337213038379e-07, "loss": 0.9146, "step": 449},
    {"epoch": 4.45, "learning_rate": 6.178087163236645e-07, "loss": 0.8129, "step": 450},
    {"epoch": 4.46, "learning_rate": 5.957709554495683e-07, "loss": 0.7845, "step": 451},
    {"epoch": 4.47, "learning_rate": 5.741213482764118e-07, "loss": 0.9181, "step": 452},
    {"epoch": 4.48, "learning_rate": 5.528607883782599e-07, "loss": 0.8529, "step": 453},
    {"epoch": 4.49, "learning_rate": 5.319901532714877e-07, "loss": 0.8863, "step": 454},
    {"epoch": 4.5, "learning_rate": 5.115103043785718e-07, "loss": 0.7747, "step": 455},
    {"epoch": 4.51, "learning_rate": 4.91422086992529e-07, "loss": 0.8225, "step": 456},
    {"epoch": 4.52, "learning_rate": 4.717263302420283e-07, "loss": 0.8055, "step": 457},
    {"epoch": 4.53, "learning_rate": 4.5242384705717404e-07, "loss": 0.8073, "step": 458},
    {"epoch": 4.54, "learning_rate": 4.3351543413594263e-07, "loss": 0.9058, "step": 459},
    {"epoch": 4.55, "learning_rate": 4.150018719113147e-07, "loss": 0.8856, "step": 460},
    {"epoch": 4.56, "learning_rate": 3.968839245190448e-07, "loss": 0.8794, "step": 461},
    {"epoch": 4.57, "learning_rate": 3.7916233976613704e-07, "loss": 0.8192, "step": 462},
    {"epoch": 4.58, "learning_rate": 3.618378490999719e-07, "loss": 0.8839, "step": 463},
    {"epoch": 4.59, "learning_rate": 3.4491116757812024e-07, "loss": 0.8596, "step": 464},
    {"epoch": 4.6, "learning_rate": 3.283829938388294e-07, "loss": 0.8171, "step": 465},
    {"epoch": 4.61, "learning_rate": 3.122540100721794e-07, "loss": 0.8854, "step": 466},
    {"epoch": 4.62, "learning_rate": 2.965248819919397e-07, "loss": 0.8724, "step": 467},
    {"epoch": 4.63, "learning_rate": 2.8119625880808185e-07, "loss": 0.9394, "step": 468},
    {"epoch": 4.64, "learning_rate": 2.66268773199988e-07, "loss": 0.7804, "step": 469},
    {"epoch": 4.65, "learning_rate": 2.5174304129033655e-07, "loss": 0.7995, "step": 470},
    {"epoch": 4.66, "learning_rate": 2.3761966261967252e-07, "loss": 0.7713, "step": 471},
    {"epoch": 4.67, "learning_rate": 2.2389922012165944e-07, "loss": 0.8227, "step": 472},
    {"epoch": 4.68, "learning_rate": 2.1058228009902094e-07, "loss": 0.763, "step": 473},
    {"epoch": 4.69, "learning_rate": 1.9766939220017155e-07, "loss": 0.8846, "step": 474},
    {"epoch": 4.7, "learning_rate": 1.8516108939651945e-07, "loss": 0.8191, "step": 475},
    {"epoch": 4.71, "learning_rate": 1.7305788796048274e-07, "loss": 0.8646, "step": 476},
    {"epoch": 4.72, "learning_rate": 1.613602874441622e-07, "loss": 0.8464, "step": 477},
    {"epoch": 4.73, "learning_rate": 1.5006877065874338e-07, "loss": 0.8767, "step": 478},
    {"epoch": 4.74, "learning_rate": 1.3918380365455232e-07, "loss": 0.7908, "step": 479},
    {"epoch": 4.75, "learning_rate": 1.287058357018278e-07, "loss": 0.8039, "step": 480},
    {"epoch": 4.76, "learning_rate": 1.1863529927217731e-07, "loss": 0.7987, "step": 481},
    {"epoch": 4.77, "learning_rate": 1.0897261002072223e-07, "loss": 0.8231, "step": 482},
    {"epoch": 4.78, "learning_rate": 9.971816676894952e-08, "loss": 0.8304, "step": 483},
    {"epoch": 4.79, "learning_rate": 9.08723514882437e-08, "loss": 0.846, "step": 484},
    {"epoch": 4.8, "learning_rate": 8.243552928412501e-08, "loss": 0.8961, "step": 485},
    {"epoch": 4.81, "learning_rate": 7.440804838117932e-08, "loss": 0.8834, "step": 486},
    {"epoch": 4.82, "learning_rate": 6.679024010868617e-08, "loss": 0.8625, "step": 487},
    {"epoch": 4.83, "learning_rate": 5.958241888693872e-08, "loss": 0.8292, "step": 488},
    {"epoch": 4.84, "learning_rate": 5.2784882214274025e-08, "loss": 0.8513, "step": 489},
    {"epoch": 4.85, "learning_rate": 4.639791065478738e-08, "loss": 0.7609, "step": 490},
    {"epoch": 4.86, "learning_rate": 4.042176782675267e-08, "loss": 0.7706, "step": 491},
    {"epoch": 4.87, "learning_rate": 3.485670039174882e-08, "loss": 0.7774, "step": 492},
    {"epoch": 4.88, "learning_rate": 2.9702938044468e-08, "loss": 0.8261, "step": 493},
    {"epoch": 4.89, "learning_rate": 2.496069350324537e-08, "loss": 0.7775, "step": 494},
    {"epoch": 4.9, "learning_rate": 2.063016250127281e-08, "loss": 0.8115, "step": 495},
    {"epoch": 4.91, "learning_rate": 1.671152377852092e-08, "loss": 0.9538, "step": 496},
    {"epoch": 4.92, "learning_rate": 1.320493907436604e-08, "loss": 0.8701, "step": 497},
    {"epoch": 4.93, "learning_rate": 1.0110553120908917e-08, "loss": 0.8319, "step": 498},
    {"epoch": 4.94, "learning_rate": 7.42849363700282e-09, "loss": 0.8792, "step": 499},
    {"epoch": 4.95, "learning_rate": 5.158871322984426e-09, "loss": 0.8577, "step": 500},
    {"epoch": 4.96, "learning_rate": 3.3017798561030268e-09, "loss": 0.9151, "step": 501},
    {"epoch": 4.97, "learning_rate": 1.8572958866514e-09, "loss": 0.8622, "step": 502},
    {"epoch": 4.98, "learning_rate": 8.254790348072306e-10, "loss": 0.9606, "step": 503},
    {"epoch": 4.99, "learning_rate": 2.063718881695209e-10, "loss": 0.8872, "step": 504},
    {"epoch": 5.0, "learning_rate": 0.0, "loss": 0.819, "step": 505},
    {"epoch": 5.0, "step": 505, "total_flos": 2.6761493632096666e+18, "train_loss": 1.198445222283354, "train_runtime": 29037.4355, "train_samples_per_second": 2.227, "train_steps_per_second": 0.017}
  ],
"logging_steps": 1.0, |
|
"max_steps": 505, |
|
"num_train_epochs": 5, |
|
"save_steps": 101, |
|
"total_flos": 2.6761493632096666e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|