{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 27.533333333333335,
  "eval_steps": 500,
  "global_step": 330,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08888888888888889,
      "grad_norm": 0.19567996263504028,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 0.94,
      "step": 1
    },
    {
      "epoch": 0.17777777777777778,
      "grad_norm": 0.2986994683742523,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 1.0076,
      "step": 2
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 0.28921350836753845,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 0.9695,
      "step": 3
    },
    {
      "epoch": 0.35555555555555557,
      "grad_norm": 0.30233919620513916,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 0.8687,
      "step": 4
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 0.36907249689102173,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 1.099,
      "step": 5
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 0.38191747665405273,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 1.2947,
      "step": 6
    },
    {
      "epoch": 0.6222222222222222,
      "grad_norm": 0.2860971689224243,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 0.9469,
      "step": 7
    },
    {
      "epoch": 0.7111111111111111,
      "grad_norm": 0.2793874442577362,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 1.032,
      "step": 8
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.28660792112350464,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 1.0493,
      "step": 9
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 0.23447971045970917,
      "learning_rate": 5.000000000000001e-07,
      "loss": 1.051,
      "step": 10
    },
    {
      "epoch": 0.9777777777777777,
      "grad_norm": 0.5307306051254272,
      "learning_rate": 5.5e-07,
      "loss": 1.0976,
      "step": 11
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.49731138348579407,
      "learning_rate": 6.000000000000001e-07,
      "loss": 0.9379,
      "step": 12
    },
    {
      "epoch": 1.0888888888888888,
      "grad_norm": 0.2752147614955902,
      "learning_rate": 6.5e-07,
      "loss": 0.9381,
      "step": 13
    },
    {
      "epoch": 1.1777777777777778,
      "grad_norm": 0.34284281730651855,
      "learning_rate": 7.000000000000001e-07,
      "loss": 0.9569,
      "step": 14
    },
    {
      "epoch": 1.2666666666666666,
      "grad_norm": 0.25561222434043884,
      "learning_rate": 7.5e-07,
      "loss": 1.0227,
      "step": 15
    },
    {
      "epoch": 1.3555555555555556,
      "grad_norm": 0.2456677258014679,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.0438,
      "step": 16
    },
    {
      "epoch": 1.4444444444444444,
      "grad_norm": 0.29848751425743103,
      "learning_rate": 8.500000000000001e-07,
      "loss": 1.1062,
      "step": 17
    },
    {
      "epoch": 1.5333333333333332,
      "grad_norm": 0.28204187750816345,
      "learning_rate": 9.000000000000001e-07,
      "loss": 0.8878,
      "step": 18
    },
    {
      "epoch": 1.6222222222222222,
      "grad_norm": 0.26430532336235046,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.0275,
      "step": 19
    },
    {
      "epoch": 1.7111111111111112,
      "grad_norm": 0.26862281560897827,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.9946,
      "step": 20
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.36035194993019104,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.0708,
      "step": 21
    },
    {
      "epoch": 1.8888888888888888,
      "grad_norm": 0.23725222051143646,
      "learning_rate": 1.1e-06,
      "loss": 1.101,
      "step": 22
    },
    {
      "epoch": 1.9777777777777779,
      "grad_norm": 0.3509962856769562,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 0.9893,
      "step": 23
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.26212382316589355,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.1786,
      "step": 24
    },
    {
      "epoch": 2.088888888888889,
      "grad_norm": 0.32545071840286255,
      "learning_rate": 1.25e-06,
      "loss": 1.0819,
      "step": 25
    },
    {
      "epoch": 2.1777777777777776,
      "grad_norm": 0.23323898017406464,
      "learning_rate": 1.3e-06,
      "loss": 1.0393,
      "step": 26
    },
    {
      "epoch": 2.2666666666666666,
      "grad_norm": 0.2809179723262787,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 1.1029,
      "step": 27
    },
    {
      "epoch": 2.3555555555555556,
      "grad_norm": 0.28984516859054565,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 1.0597,
      "step": 28
    },
    {
      "epoch": 2.4444444444444446,
      "grad_norm": 0.3196355402469635,
      "learning_rate": 1.45e-06,
      "loss": 0.9494,
      "step": 29
    },
    {
      "epoch": 2.533333333333333,
      "grad_norm": 0.2864663600921631,
      "learning_rate": 1.5e-06,
      "loss": 0.944,
      "step": 30
    },
    {
      "epoch": 2.6222222222222222,
      "grad_norm": 0.24206970632076263,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 1.023,
      "step": 31
    },
    {
      "epoch": 2.7111111111111112,
      "grad_norm": 0.2996947765350342,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.9772,
      "step": 32
    },
    {
      "epoch": 2.8,
      "grad_norm": 0.28879255056381226,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 1.0928,
      "step": 33
    },
    {
      "epoch": 2.888888888888889,
      "grad_norm": 0.27944567799568176,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.9012,
      "step": 34
    },
    {
      "epoch": 2.977777777777778,
      "grad_norm": 0.2939954400062561,
      "learning_rate": 1.75e-06,
      "loss": 1.0461,
      "step": 35
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.5568829774856567,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.9155,
      "step": 36
    },
    {
      "epoch": 3.088888888888889,
      "grad_norm": 0.3321582078933716,
      "learning_rate": 1.85e-06,
      "loss": 1.0048,
      "step": 37
    },
    {
      "epoch": 3.1777777777777776,
      "grad_norm": 0.29956573247909546,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.9085,
      "step": 38
    },
    {
      "epoch": 3.2666666666666666,
      "grad_norm": 0.2925553023815155,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.8711,
      "step": 39
    },
    {
      "epoch": 3.3555555555555556,
      "grad_norm": 0.32938718795776367,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.0397,
      "step": 40
    },
    {
      "epoch": 3.4444444444444446,
      "grad_norm": 0.37126410007476807,
      "learning_rate": 2.05e-06,
      "loss": 1.0436,
      "step": 41
    },
    {
      "epoch": 3.533333333333333,
      "grad_norm": 0.30750295519828796,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 1.1212,
      "step": 42
    },
    {
      "epoch": 3.6222222222222222,
      "grad_norm": 0.27158430218696594,
      "learning_rate": 2.15e-06,
      "loss": 1.0738,
      "step": 43
    },
    {
      "epoch": 3.7111111111111112,
      "grad_norm": 0.19385884702205658,
      "learning_rate": 2.2e-06,
      "loss": 0.9962,
      "step": 44
    },
    {
      "epoch": 3.8,
      "grad_norm": 0.3193662464618683,
      "learning_rate": 2.25e-06,
      "loss": 1.121,
      "step": 45
    },
    {
      "epoch": 3.888888888888889,
      "grad_norm": 0.30041366815567017,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 1.0232,
      "step": 46
    },
    {
      "epoch": 3.977777777777778,
      "grad_norm": 0.3569968640804291,
      "learning_rate": 2.35e-06,
      "loss": 1.0396,
      "step": 47
    },
    {
      "epoch": 4.0,
      "grad_norm": 1.0826685428619385,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 1.0178,
      "step": 48
    },
    {
      "epoch": 4.088888888888889,
      "grad_norm": 0.3048429787158966,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 1.0127,
      "step": 49
    },
    {
      "epoch": 4.177777777777778,
      "grad_norm": 0.6735008955001831,
      "learning_rate": 2.5e-06,
      "loss": 1.0034,
      "step": 50
    },
    {
      "epoch": 4.266666666666667,
      "grad_norm": 0.28620263934135437,
      "learning_rate": 2.55e-06,
      "loss": 1.0821,
      "step": 51
    },
    {
      "epoch": 4.355555555555555,
      "grad_norm": 0.2925148904323578,
      "learning_rate": 2.6e-06,
      "loss": 0.8747,
      "step": 52
    },
    {
      "epoch": 4.444444444444445,
      "grad_norm": 0.28806746006011963,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 1.0981,
      "step": 53
    },
    {
      "epoch": 4.533333333333333,
      "grad_norm": 0.2822423279285431,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.8774,
      "step": 54
    },
    {
      "epoch": 4.622222222222222,
      "grad_norm": 0.30998173356056213,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 1.0841,
      "step": 55
    },
    {
      "epoch": 4.711111111111111,
      "grad_norm": 0.3339422047138214,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 1.0752,
      "step": 56
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.3425965905189514,
      "learning_rate": 2.85e-06,
      "loss": 1.087,
      "step": 57
    },
    {
      "epoch": 4.888888888888889,
      "grad_norm": 0.2678565979003906,
      "learning_rate": 2.9e-06,
      "loss": 1.0578,
      "step": 58
    },
    {
      "epoch": 4.977777777777778,
      "grad_norm": 0.3225661516189575,
      "learning_rate": 2.95e-06,
      "loss": 0.9329,
      "step": 59
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.5934492945671082,
      "learning_rate": 3e-06,
      "loss": 0.9952,
      "step": 60
    },
    {
      "epoch": 5.088888888888889,
      "grad_norm": 0.41927722096443176,
      "learning_rate": 3.05e-06,
      "loss": 1.2386,
      "step": 61
    },
    {
      "epoch": 5.177777777777778,
      "grad_norm": 0.3196963667869568,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.9,
      "step": 62
    },
    {
      "epoch": 5.266666666666667,
      "grad_norm": 0.31587812304496765,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 1.0832,
      "step": 63
    },
    {
      "epoch": 5.355555555555555,
      "grad_norm": 0.34145388007164,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.9822,
      "step": 64
    },
    {
      "epoch": 5.444444444444445,
      "grad_norm": 0.302914023399353,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.9968,
      "step": 65
    },
    {
      "epoch": 5.533333333333333,
      "grad_norm": 0.22116686403751373,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.9124,
      "step": 66
    },
    {
      "epoch": 5.622222222222222,
      "grad_norm": 0.3197582960128784,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 1.1134,
      "step": 67
    },
    {
      "epoch": 5.711111111111111,
      "grad_norm": 0.31704509258270264,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.879,
      "step": 68
    },
    {
      "epoch": 5.8,
      "grad_norm": 0.35378602147102356,
      "learning_rate": 3.45e-06,
      "loss": 1.103,
      "step": 69
    },
    {
      "epoch": 5.888888888888889,
      "grad_norm": 0.33167579770088196,
      "learning_rate": 3.5e-06,
      "loss": 1.0124,
      "step": 70
    },
    {
      "epoch": 5.977777777777778,
      "grad_norm": 0.30484288930892944,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 1.001,
      "step": 71
    },
    {
      "epoch": 6.0,
      "grad_norm": 0.4108036458492279,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 1.0199,
      "step": 72
    },
    {
      "epoch": 6.088888888888889,
      "grad_norm": 0.3657933175563812,
      "learning_rate": 3.65e-06,
      "loss": 1.1584,
      "step": 73
    },
    {
      "epoch": 6.177777777777778,
      "grad_norm": 0.6758050918579102,
      "learning_rate": 3.7e-06,
      "loss": 1.0419,
      "step": 74
    },
    {
      "epoch": 6.266666666666667,
      "grad_norm": 0.35230952501296997,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.856,
      "step": 75
    },
    {
      "epoch": 6.355555555555555,
      "grad_norm": 0.38534823060035706,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.9925,
      "step": 76
    },
    {
      "epoch": 6.444444444444445,
      "grad_norm": 0.32009822130203247,
      "learning_rate": 3.85e-06,
      "loss": 1.0069,
      "step": 77
    },
    {
      "epoch": 6.533333333333333,
      "grad_norm": 0.32822826504707336,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.953,
      "step": 78
    },
    {
      "epoch": 6.622222222222222,
      "grad_norm": 0.36539173126220703,
      "learning_rate": 3.95e-06,
      "loss": 1.1169,
      "step": 79
    },
    {
      "epoch": 6.711111111111111,
      "grad_norm": 0.3481679856777191,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.9554,
      "step": 80
    },
    {
      "epoch": 6.8,
      "grad_norm": 0.4089198410511017,
      "learning_rate": 4.05e-06,
      "loss": 1.0723,
      "step": 81
    },
    {
      "epoch": 6.888888888888889,
      "grad_norm": 0.3250631093978882,
      "learning_rate": 4.1e-06,
      "loss": 1.0264,
      "step": 82
    },
    {
      "epoch": 6.977777777777778,
      "grad_norm": 0.26394304633140564,
      "learning_rate": 4.15e-06,
      "loss": 0.9445,
      "step": 83
    },
    {
      "epoch": 7.0,
      "grad_norm": 0.5080349445343018,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 1.1842,
      "step": 84
    },
    {
      "epoch": 7.088888888888889,
      "grad_norm": 0.349873811006546,
      "learning_rate": 4.25e-06,
      "loss": 1.0707,
      "step": 85
    },
    {
      "epoch": 7.177777777777778,
      "grad_norm": 0.4573216140270233,
      "learning_rate": 4.3e-06,
      "loss": 1.116,
      "step": 86
    },
    {
      "epoch": 7.266666666666667,
      "grad_norm": 0.32420143485069275,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.8747,
      "step": 87
    },
    {
      "epoch": 7.355555555555555,
      "grad_norm": 0.3515697717666626,
      "learning_rate": 4.4e-06,
      "loss": 0.9918,
      "step": 88
    },
    {
      "epoch": 7.444444444444445,
      "grad_norm": 0.404070645570755,
      "learning_rate": 4.450000000000001e-06,
      "loss": 1.1398,
      "step": 89
    },
    {
      "epoch": 7.533333333333333,
      "grad_norm": 0.3488151729106903,
      "learning_rate": 4.5e-06,
      "loss": 0.9042,
      "step": 90
    },
    {
      "epoch": 7.622222222222222,
      "grad_norm": 0.3190948963165283,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.9608,
      "step": 91
    },
    {
      "epoch": 7.711111111111111,
      "grad_norm": 0.3400370478630066,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.9085,
      "step": 92
    },
    {
      "epoch": 7.8,
      "grad_norm": 0.3799091577529907,
      "learning_rate": 4.65e-06,
      "loss": 1.0053,
      "step": 93
    },
    {
      "epoch": 7.888888888888889,
      "grad_norm": 0.31531545519828796,
      "learning_rate": 4.7e-06,
      "loss": 1.1269,
      "step": 94
    },
    {
      "epoch": 7.977777777777778,
      "grad_norm": 0.540896475315094,
      "learning_rate": 4.75e-06,
      "loss": 1.1115,
      "step": 95
    },
    {
      "epoch": 8.0,
      "grad_norm": 0.4667949080467224,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.8602,
      "step": 96
    },
    {
      "epoch": 8.088888888888889,
      "grad_norm": 0.29491057991981506,
      "learning_rate": 4.85e-06,
      "loss": 1.0071,
      "step": 97
    },
    {
      "epoch": 8.177777777777777,
      "grad_norm": 0.4064798355102539,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.847,
      "step": 98
    },
    {
      "epoch": 8.266666666666667,
      "grad_norm": 0.35105201601982117,
      "learning_rate": 4.95e-06,
      "loss": 0.9321,
      "step": 99
    },
    {
      "epoch": 8.355555555555556,
      "grad_norm": 0.42249858379364014,
      "learning_rate": 5e-06,
      "loss": 0.9997,
      "step": 100
    },
    {
      "epoch": 8.444444444444445,
      "grad_norm": 0.46596774458885193,
      "learning_rate": 4.9997667899113055e-06,
      "loss": 0.9206,
      "step": 101
    },
    {
      "epoch": 8.533333333333333,
      "grad_norm": 0.4481567442417145,
      "learning_rate": 4.999067203154777e-06,
      "loss": 1.0383,
      "step": 102
    },
    {
      "epoch": 8.622222222222222,
      "grad_norm": 0.389378160238266,
      "learning_rate": 4.997901370250966e-06,
      "loss": 1.2077,
      "step": 103
    },
    {
      "epoch": 8.71111111111111,
      "grad_norm": 0.386259526014328,
      "learning_rate": 4.99626950870707e-06,
      "loss": 0.9617,
      "step": 104
    },
    {
      "epoch": 8.8,
      "grad_norm": 0.38413456082344055,
      "learning_rate": 4.994171922976349e-06,
      "loss": 0.9182,
      "step": 105
    },
    {
      "epoch": 8.88888888888889,
      "grad_norm": 0.4302501082420349,
      "learning_rate": 4.991609004401324e-06,
      "loss": 1.1561,
      "step": 106
    },
    {
      "epoch": 8.977777777777778,
      "grad_norm": 0.4424130320549011,
      "learning_rate": 4.988581231140772e-06,
      "loss": 1.135,
      "step": 107
    },
    {
      "epoch": 9.0,
      "grad_norm": 0.3800254762172699,
      "learning_rate": 4.985089168080509e-06,
      "loss": 0.7612,
      "step": 108
    },
    {
      "epoch": 9.088888888888889,
      "grad_norm": 0.46646955609321594,
      "learning_rate": 4.981133466728004e-06,
      "loss": 0.9991,
      "step": 109
    },
    {
      "epoch": 9.177777777777777,
      "grad_norm": 0.4952200949192047,
      "learning_rate": 4.976714865090827e-06,
      "loss": 0.9779,
      "step": 110
    },
    {
      "epoch": 9.266666666666667,
      "grad_norm": 0.4172927737236023,
      "learning_rate": 4.971834187538963e-06,
      "loss": 0.9449,
      "step": 111
    },
    {
      "epoch": 9.355555555555556,
      "grad_norm": 0.4364710748195648,
      "learning_rate": 4.966492344651006e-06,
      "loss": 0.8786,
      "step": 112
    },
    {
      "epoch": 9.444444444444445,
      "grad_norm": 0.4623521566390991,
      "learning_rate": 4.960690333044279e-06,
      "loss": 1.0798,
      "step": 113
    },
    {
      "epoch": 9.533333333333333,
      "grad_norm": 0.4485428035259247,
      "learning_rate": 4.954429235188897e-06,
      "loss": 1.182,
      "step": 114
    },
    {
      "epoch": 9.622222222222222,
      "grad_norm": 0.38542506098747253,
      "learning_rate": 4.947710219205808e-06,
      "loss": 0.9882,
      "step": 115
    },
    {
      "epoch": 9.71111111111111,
      "grad_norm": 0.35373494029045105,
      "learning_rate": 4.940534538648862e-06,
      "loss": 0.9649,
      "step": 116
    },
    {
      "epoch": 9.8,
      "grad_norm": 0.5160645246505737,
      "learning_rate": 4.932903532270939e-06,
      "loss": 1.0484,
      "step": 117
    },
    {
      "epoch": 9.88888888888889,
      "grad_norm": 0.3412460684776306,
      "learning_rate": 4.924818623774178e-06,
      "loss": 0.8972,
      "step": 118
    },
    {
      "epoch": 9.977777777777778,
      "grad_norm": 0.4623534083366394,
      "learning_rate": 4.916281321544362e-06,
      "loss": 0.9265,
      "step": 119
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.4575502872467041,
      "learning_rate": 4.907293218369499e-06,
      "loss": 1.1764,
      "step": 120
    },
    {
      "epoch": 10.088888888888889,
      "grad_norm": 0.4604892134666443,
      "learning_rate": 4.897855991142658e-06,
      "loss": 0.9429,
      "step": 121
    },
    {
      "epoch": 10.177777777777777,
      "grad_norm": 0.4723954200744629,
      "learning_rate": 4.8879714005491205e-06,
      "loss": 1.0113,
      "step": 122
    },
    {
      "epoch": 10.266666666666667,
      "grad_norm": 0.419286847114563,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 1.0417,
      "step": 123
    },
    {
      "epoch": 10.355555555555556,
      "grad_norm": 0.33823734521865845,
      "learning_rate": 4.8668675889776095e-06,
      "loss": 0.9355,
      "step": 124
    },
    {
      "epoch": 10.444444444444445,
      "grad_norm": 0.5317716598510742,
      "learning_rate": 4.855652305297052e-06,
      "loss": 0.829,
      "step": 125
    },
    {
      "epoch": 10.533333333333333,
      "grad_norm": 0.5897918343544006,
      "learning_rate": 4.843997532110051e-06,
      "loss": 1.003,
      "step": 126
    },
    {
      "epoch": 10.622222222222222,
      "grad_norm": 0.4935283064842224,
      "learning_rate": 4.83190544382516e-06,
      "loss": 1.0789,
      "step": 127
    },
    {
      "epoch": 10.71111111111111,
      "grad_norm": 0.575020432472229,
      "learning_rate": 4.819378296439962e-06,
      "loss": 1.0207,
      "step": 128
    },
    {
      "epoch": 10.8,
      "grad_norm": 0.7120870351791382,
      "learning_rate": 4.80641842712018e-06,
      "loss": 0.8893,
      "step": 129
    },
    {
      "epoch": 10.88888888888889,
      "grad_norm": 0.445549875497818,
      "learning_rate": 4.793028253763633e-06,
      "loss": 1.0629,
      "step": 130
    },
    {
      "epoch": 10.977777777777778,
      "grad_norm": 0.372715026140213,
      "learning_rate": 4.7792102745491345e-06,
      "loss": 0.986,
      "step": 131
    },
    {
      "epoch": 11.0,
      "grad_norm": 0.6868900656700134,
      "learning_rate": 4.764967067470409e-06,
      "loss": 1.2309,
      "step": 132
    },
    {
      "epoch": 11.088888888888889,
      "grad_norm": 0.47809746861457825,
      "learning_rate": 4.750301289855128e-06,
      "loss": 1.0157,
      "step": 133
    },
    {
      "epoch": 11.177777777777777,
      "grad_norm": 0.4651176631450653,
      "learning_rate": 4.735215677869129e-06,
      "loss": 1.0521,
      "step": 134
    },
    {
      "epoch": 11.266666666666667,
      "grad_norm": 0.4832024574279785,
      "learning_rate": 4.7197130460059385e-06,
      "loss": 0.861,
      "step": 135
    },
    {
      "epoch": 11.355555555555556,
      "grad_norm": 0.508399486541748,
      "learning_rate": 4.7037962865616795e-06,
      "loss": 1.0256,
      "step": 136
    },
    {
      "epoch": 11.444444444444445,
      "grad_norm": 0.5031757950782776,
      "learning_rate": 4.687468369095457e-06,
      "loss": 1.0302,
      "step": 137
    },
    {
      "epoch": 11.533333333333333,
      "grad_norm": 0.4271880090236664,
      "learning_rate": 4.6707323398753346e-06,
      "loss": 0.9272,
      "step": 138
    },
    {
      "epoch": 11.622222222222222,
      "grad_norm": 0.41679927706718445,
      "learning_rate": 4.6535913213100005e-06,
      "loss": 0.9849,
      "step": 139
    },
    {
      "epoch": 11.71111111111111,
      "grad_norm": 0.4784274995326996,
      "learning_rate": 4.636048511366222e-06,
      "loss": 0.7549,
      "step": 140
    },
    {
      "epoch": 11.8,
      "grad_norm": 0.5748984217643738,
      "learning_rate": 4.618107182972209e-06,
      "loss": 0.9387,
      "step": 141
    },
    {
      "epoch": 11.88888888888889,
      "grad_norm": 0.5074996948242188,
      "learning_rate": 4.599770683406992e-06,
      "loss": 1.1931,
      "step": 142
    },
    {
      "epoch": 11.977777777777778,
      "grad_norm": 0.46601927280426025,
      "learning_rate": 4.58104243367592e-06,
      "loss": 0.8533,
      "step": 143
    },
    {
      "epoch": 12.0,
      "grad_norm": 0.8853896260261536,
      "learning_rate": 4.561925927872421e-06,
      "loss": 1.2318,
      "step": 144
    },
    {
      "epoch": 12.088888888888889,
      "grad_norm": 0.4770624041557312,
      "learning_rate": 4.542424732526105e-06,
      "loss": 0.877,
      "step": 145
    },
    {
      "epoch": 12.177777777777777,
      "grad_norm": 0.5245800018310547,
      "learning_rate": 4.522542485937369e-06,
      "loss": 0.9198,
      "step": 146
    },
    {
      "epoch": 12.266666666666667,
      "grad_norm": 0.8387463092803955,
      "learning_rate": 4.5022828974986044e-06,
      "loss": 0.9225,
      "step": 147
    },
    {
      "epoch": 12.355555555555556,
      "grad_norm": 0.4439677596092224,
      "learning_rate": 4.481649747002146e-06,
      "loss": 1.0818,
      "step": 148
    },
    {
      "epoch": 12.444444444444445,
      "grad_norm": 0.4621722996234894,
      "learning_rate": 4.460646883935079e-06,
      "loss": 0.9635,
      "step": 149
    },
    {
      "epoch": 12.533333333333333,
      "grad_norm": 0.4656592607498169,
      "learning_rate": 4.43927822676105e-06,
      "loss": 0.8971,
      "step": 150
    },
    {
      "epoch": 12.622222222222222,
      "grad_norm": 0.4961630702018738,
      "learning_rate": 4.417547762189207e-06,
      "loss": 0.9474,
      "step": 151
    },
    {
      "epoch": 12.71111111111111,
      "grad_norm": 0.5954545736312866,
      "learning_rate": 4.395459544430407e-06,
      "loss": 1.0403,
      "step": 152
    },
    {
      "epoch": 12.8,
      "grad_norm": 0.7063807249069214,
      "learning_rate": 4.373017694440828e-06,
      "loss": 1.0076,
      "step": 153
    },
    {
      "epoch": 12.88888888888889,
      "grad_norm": 0.48963138461112976,
      "learning_rate": 4.35022639915313e-06,
      "loss": 0.9405,
      "step": 154
    },
    {
      "epoch": 12.977777777777778,
      "grad_norm": 0.6047050952911377,
      "learning_rate": 4.32708991069531e-06,
      "loss": 0.9689,
      "step": 155
    },
    {
      "epoch": 13.0,
      "grad_norm": 0.779731273651123,
      "learning_rate": 4.30361254559739e-06,
      "loss": 0.8877,
      "step": 156
    },
    {
      "epoch": 13.088888888888889,
      "grad_norm": 0.6274661421775818,
      "learning_rate": 4.279798683986084e-06,
      "loss": 0.9697,
      "step": 157
    },
    {
      "epoch": 13.177777777777777,
      "grad_norm": 0.5181424021720886,
      "learning_rate": 4.255652768767619e-06,
      "loss": 0.8949,
      "step": 158
    },
    {
      "epoch": 13.266666666666667,
      "grad_norm": 0.4889301657676697,
      "learning_rate": 4.2311793047988145e-06,
      "loss": 0.941,
      "step": 159
    },
    {
      "epoch": 13.355555555555556,
      "grad_norm": 0.5373956561088562,
      "learning_rate": 4.206382858046636e-06,
      "loss": 0.792,
      "step": 160
    },
    {
      "epoch": 13.444444444444445,
      "grad_norm": 0.4251907765865326,
      "learning_rate": 4.181268054736319e-06,
      "loss": 0.9622,
      "step": 161
    },
    {
      "epoch": 13.533333333333333,
      "grad_norm": 0.3853691518306732,
      "learning_rate": 4.15583958048827e-06,
      "loss": 0.8678,
      "step": 162
    },
    {
      "epoch": 13.622222222222222,
      "grad_norm": 0.5543946027755737,
      "learning_rate": 4.130102179443877e-06,
      "loss": 0.8901,
      "step": 163
    },
    {
      "epoch": 13.71111111111111,
      "grad_norm": 0.6818388104438782,
      "learning_rate": 4.104060653380403e-06,
      "loss": 0.9957,
      "step": 164
    },
    {
      "epoch": 13.8,
      "grad_norm": 0.6395586729049683,
      "learning_rate": 4.077719860815132e-06,
      "loss": 1.0817,
      "step": 165
    },
    {
      "epoch": 13.88888888888889,
      "grad_norm": 0.6789353489875793,
      "learning_rate": 4.051084716098921e-06,
      "loss": 1.1538,
      "step": 166
    },
    {
      "epoch": 13.977777777777778,
      "grad_norm": 0.4569081664085388,
      "learning_rate": 4.024160188499337e-06,
      "loss": 0.9047,
      "step": 167
    },
    {
      "epoch": 14.0,
      "grad_norm": 1.7882684469223022,
      "learning_rate": 3.996951301273556e-06,
      "loss": 0.9499,
      "step": 168
    },
    {
      "epoch": 14.088888888888889,
      "grad_norm": 0.6156724691390991,
      "learning_rate": 3.969463130731183e-06,
      "loss": 0.9101,
      "step": 169
    },
    {
      "epoch": 14.177777777777777,
      "grad_norm": 0.5264705419540405,
      "learning_rate": 3.941700805287169e-06,
      "loss": 1.0762,
      "step": 170
    },
    {
      "epoch": 14.266666666666667,
      "grad_norm": 0.5197077989578247,
      "learning_rate": 3.913669504505015e-06,
      "loss": 1.0859,
      "step": 171
    },
    {
      "epoch": 14.355555555555556,
      "grad_norm": 0.5074746608734131,
      "learning_rate": 3.8853744581304376e-06,
      "loss": 0.8807,
      "step": 172
    },
    {
      "epoch": 14.444444444444445,
      "grad_norm": 0.5885288119316101,
      "learning_rate": 3.856820945115655e-06,
      "loss": 0.7543,
      "step": 173
    },
    {
      "epoch": 14.533333333333333,
      "grad_norm": 0.5903788805007935,
      "learning_rate": 3.828014292634508e-06,
      "loss": 0.8641,
      "step": 174
    },
    {
      "epoch": 14.622222222222222,
      "grad_norm": 0.5170984268188477,
      "learning_rate": 3.798959875088584e-06,
      "loss": 0.8557,
      "step": 175
    },
    {
      "epoch": 14.71111111111111,
      "grad_norm": 0.4823358356952667,
      "learning_rate": 3.769663113104516e-06,
      "loss": 0.8084,
      "step": 176
    },
    {
      "epoch": 14.8,
      "grad_norm": 0.5628538131713867,
      "learning_rate": 3.7401294725226707e-06,
      "loss": 1.0103,
      "step": 177
    },
    {
      "epoch": 14.88888888888889,
      "grad_norm": 0.575435221195221,
      "learning_rate": 3.7103644633774015e-06,
      "loss": 0.9011,
      "step": 178
    },
    {
      "epoch": 14.977777777777778,
      "grad_norm": 0.7047615051269531,
      "learning_rate": 3.680373638869047e-06,
      "loss": 0.9671,
      "step": 179
    },
    {
      "epoch": 15.0,
      "grad_norm": 0.747231125831604,
      "learning_rate": 3.650162594327881e-06,
      "loss": 1.1957,
      "step": 180
    },
    {
      "epoch": 15.088888888888889,
      "grad_norm": 0.6477513313293457,
      "learning_rate": 3.6197369661702052e-06,
      "loss": 1.0547,
      "step": 181
    },
    {
      "epoch": 15.177777777777777,
      "grad_norm": 0.44793015718460083,
      "learning_rate": 3.589102430846773e-06,
      "loss": 0.8339,
      "step": 182
    },
    {
      "epoch": 15.266666666666667,
      "grad_norm": 0.4845152199268341,
      "learning_rate": 3.5582647037837446e-06,
      "loss": 0.933,
      "step": 183
    },
    {
      "epoch": 15.355555555555556,
      "grad_norm": 0.5620778203010559,
      "learning_rate": 3.527229538316371e-06,
      "loss": 0.8677,
      "step": 184
    },
    {
      "epoch": 15.444444444444445,
      "grad_norm": 1.1254522800445557,
      "learning_rate": 3.4960027246156043e-06,
      "loss": 0.9953,
      "step": 185
    },
    {
      "epoch": 15.533333333333333,
      "grad_norm": 0.5396095514297485,
      "learning_rate": 3.4645900886078388e-06,
      "loss": 0.9982,
      "step": 186
    },
    {
      "epoch": 15.622222222222222,
      "grad_norm": 0.6709286570549011,
      "learning_rate": 3.432997490887979e-06,
      "loss": 0.8604,
      "step": 187
    },
    {
      "epoch": 15.71111111111111,
      "grad_norm": 0.655689001083374,
      "learning_rate": 3.4012308256260366e-06,
      "loss": 0.7839,
      "step": 188
    },
    {
      "epoch": 15.8,
      "grad_norm": 0.6764020323753357,
      "learning_rate": 3.369296019467473e-06,
      "loss": 0.8544,
      "step": 189
    },
    {
      "epoch": 15.88888888888889,
      "grad_norm": 0.5892664194107056,
      "learning_rate": 3.3371990304274654e-06,
      "loss": 0.8877,
      "step": 190
    },
    {
      "epoch": 15.977777777777778,
      "grad_norm": 0.5942324995994568,
      "learning_rate": 3.304945846779346e-06,
      "loss": 0.9024,
      "step": 191
    },
    {
      "epoch": 16.0,
      "grad_norm": 0.7737208008766174,
      "learning_rate": 3.272542485937369e-06,
      "loss": 1.1495,
      "step": 192
    },
    {
      "epoch": 16.08888888888889,
      "grad_norm": 0.6798277497291565,
      "learning_rate": 3.239994993334059e-06,
      "loss": 0.9458,
      "step": 193
    },
    {
      "epoch": 16.177777777777777,
      "grad_norm": 0.5800243020057678,
      "learning_rate": 3.207309441292325e-06,
      "loss": 0.881,
      "step": 194
    },
    {
      "epoch": 16.266666666666666,
      "grad_norm": 0.626613438129425,
      "learning_rate": 3.174491927892561e-06,
      "loss": 0.9062,
      "step": 195
    },
    {
      "epoch": 16.355555555555554,
      "grad_norm": 0.6165486574172974,
      "learning_rate": 3.1415485758349344e-06,
      "loss": 1.002,
      "step": 196
    },
    {
      "epoch": 16.444444444444443,
      "grad_norm": 0.6354159116744995,
      "learning_rate": 3.1084855312970897e-06,
      "loss": 0.9224,
      "step": 197
    },
    {
      "epoch": 16.533333333333335,
      "grad_norm": 0.6106343865394592,
      "learning_rate": 3.0753089627874668e-06,
      "loss": 0.8111,
      "step": 198
    },
    {
      "epoch": 16.622222222222224,
      "grad_norm": 0.5856066346168518,
      "learning_rate": 3.0420250599944525e-06,
      "loss": 1.1155,
      "step": 199
    },
    {
      "epoch": 16.711111111111112,
      "grad_norm": 0.6578230857849121,
      "learning_rate": 3.0086400326315853e-06,
      "loss": 0.7782,
      "step": 200
    },
    {
      "epoch": 16.8,
      "grad_norm": 0.5129030346870422,
      "learning_rate": 2.9751601092790185e-06,
      "loss": 0.8999,
      "step": 201
    },
    {
      "epoch": 16.88888888888889,
      "grad_norm": 0.5636340975761414,
      "learning_rate": 2.941591536221469e-06,
      "loss": 0.8363,
      "step": 202
    },
    {
      "epoch": 16.977777777777778,
      "grad_norm": 0.6733406186103821,
      "learning_rate": 2.907940576282856e-06,
      "loss": 0.8183,
      "step": 203
    },
    {
      "epoch": 17.0,
      "grad_norm": 1.0829391479492188,
      "learning_rate": 2.8742135076578608e-06,
      "loss": 1.1636,
      "step": 204
    },
    {
      "epoch": 17.08888888888889,
      "grad_norm": 0.5469006299972534,
      "learning_rate": 2.840416622740617e-06,
      "loss": 0.8142,
      "step": 205
    },
    {
      "epoch": 17.177777777777777,
      "grad_norm": 0.6195508241653442,
      "learning_rate": 2.8065562269507464e-06,
      "loss": 0.8903,
      "step": 206
    },
    {
      "epoch": 17.266666666666666,
      "grad_norm": 0.5448200702667236,
      "learning_rate": 2.7726386375569748e-06,
      "loss": 0.9834,
      "step": 207
    },
    {
      "epoch": 17.355555555555554,
      "grad_norm": 0.667235791683197,
      "learning_rate": 2.7386701824985257e-06,
      "loss": 0.6709,
      "step": 208
    },
    {
      "epoch": 17.444444444444443,
      "grad_norm": 0.48227953910827637,
      "learning_rate": 2.7046571992045334e-06,
      "loss": 0.8927,
      "step": 209
    },
    {
      "epoch": 17.533333333333335,
      "grad_norm": 1.3036866188049316,
      "learning_rate": 2.670606033411678e-06,
      "loss": 0.9977,
      "step": 210
    },
    {
      "epoch": 17.622222222222224,
      "grad_norm": 0.7368432879447937,
      "learning_rate": 2.636523037980275e-06,
      "loss": 0.9036,
      "step": 211
    },
    {
      "epoch": 17.711111111111112,
      "grad_norm": 0.49763280153274536,
      "learning_rate": 2.602414571709036e-06,
      "loss": 0.9087,
      "step": 212
    },
    {
      "epoch": 17.8,
      "grad_norm": 0.7820340394973755,
      "learning_rate": 2.5682869981487154e-06,
      "loss": 0.9693,
      "step": 213
    },
    {
      "epoch": 17.88888888888889,
      "grad_norm": 0.6577602624893188,
      "learning_rate": 2.5341466844148775e-06,
      "loss": 0.8481,
      "step": 214
    },
    {
      "epoch": 17.977777777777778,
      "grad_norm": 0.8875017166137695,
      "learning_rate": 2.5e-06,
      "loss": 1.0045,
      "step": 215
    },
    {
      "epoch": 18.0,
      "grad_norm": 2.128192663192749,
      "learning_rate": 2.465853315585123e-06,
      "loss": 1.0231,
      "step": 216
    },
    {
      "epoch": 18.08888888888889,
      "grad_norm": 0.5647242665290833,
      "learning_rate": 2.431713001851286e-06,
      "loss": 0.8498,
      "step": 217
    },
    {
      "epoch": 18.177777777777777,
      "grad_norm": 0.6913108825683594,
      "learning_rate": 2.3975854282909645e-06,
      "loss": 1.0558,
      "step": 218
    },
    {
      "epoch": 18.266666666666666,
      "grad_norm": 0.5946425795555115,
      "learning_rate": 2.3634769620197253e-06,
      "loss": 0.8356,
      "step": 219
    },
    {
      "epoch": 18.355555555555554,
      "grad_norm": 0.9023786187171936,
      "learning_rate": 2.3293939665883233e-06,
      "loss": 0.959,
      "step": 220
    },
    {
      "epoch": 18.444444444444443,
      "grad_norm": 0.6683367490768433,
      "learning_rate": 2.2953428007954682e-06,
      "loss": 0.7641,
      "step": 221
    },
    {
      "epoch": 18.533333333333335,
      "grad_norm": 0.7781148552894592,
      "learning_rate": 2.261329817501475e-06,
      "loss": 0.9507,
      "step": 222
    },
    {
      "epoch": 18.622222222222224,
      "grad_norm": 0.5396183133125305,
      "learning_rate": 2.2273613624430256e-06,
      "loss": 0.9078,
      "step": 223
    },
    {
      "epoch": 18.711111111111112,
      "grad_norm": 0.6765702962875366,
      "learning_rate": 2.1934437730492544e-06,
      "loss": 0.8616,
      "step": 224
    },
    {
      "epoch": 18.8,
      "grad_norm": 0.6181918978691101,
      "learning_rate": 2.159583377259384e-06,
      "loss": 0.7377,
      "step": 225
    },
    {
      "epoch": 18.88888888888889,
      "grad_norm": 0.7162302732467651,
      "learning_rate": 2.1257864923421405e-06,
      "loss": 0.9405,
      "step": 226
    },
    {
      "epoch": 18.977777777777778,
      "grad_norm": 0.6811490654945374,
      "learning_rate": 2.092059423717145e-06,
      "loss": 0.874,
      "step": 227
    },
    {
      "epoch": 19.0,
      "grad_norm": 0.6545157432556152,
      "learning_rate": 2.0584084637785316e-06,
      "loss": 0.6244,
      "step": 228
    },
    {
      "epoch": 19.08888888888889,
      "grad_norm": 0.5392009615898132,
      "learning_rate": 2.0248398907209827e-06,
      "loss": 0.8523,
      "step": 229
    },
    {
      "epoch": 19.177777777777777,
      "grad_norm": 0.5963950157165527,
      "learning_rate": 1.991359967368416e-06,
      "loss": 0.6882,
      "step": 230
    },
    {
      "epoch": 19.266666666666666,
      "grad_norm": 0.6452714204788208,
      "learning_rate": 1.957974940005548e-06,
      "loss": 0.7386,
      "step": 231
    },
    {
      "epoch": 19.355555555555554,
      "grad_norm": 0.6710150241851807,
      "learning_rate": 1.9246910372125345e-06,
      "loss": 0.9825,
      "step": 232
    },
    {
      "epoch": 19.444444444444443,
      "grad_norm": 0.5551663637161255,
      "learning_rate": 1.8915144687029107e-06,
      "loss": 0.9288,
      "step": 233
    },
    {
      "epoch": 19.533333333333335,
      "grad_norm": 0.7859821319580078,
      "learning_rate": 1.8584514241650667e-06,
      "loss": 1.0302,
      "step": 234
    },
    {
      "epoch": 19.622222222222224,
      "grad_norm": 0.580791175365448,
      "learning_rate": 1.8255080721074391e-06,
      "loss": 0.9244,
      "step": 235
    },
    {
      "epoch": 19.711111111111112,
      "grad_norm": 0.7111539244651794,
      "learning_rate": 1.792690558707675e-06,
      "loss": 0.8465,
      "step": 236
    },
    {
      "epoch": 19.8,
      "grad_norm": 0.8440479636192322,
      "learning_rate": 1.7600050066659418e-06,
      "loss": 0.9308,
      "step": 237
    },
    {
      "epoch": 19.88888888888889,
      "grad_norm": 0.7431160807609558,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 0.9417,
      "step": 238
    },
    {
      "epoch": 19.977777777777778,
      "grad_norm": 0.8644082546234131,
      "learning_rate": 1.695054153220655e-06,
      "loss": 0.8363,
      "step": 239
    },
    {
      "epoch": 20.0,
      "grad_norm": 0.8472647666931152,
      "learning_rate": 1.6628009695725348e-06,
      "loss": 0.5418,
      "step": 240
    },
    {
      "epoch": 20.08888888888889,
      "grad_norm": 0.8104817867279053,
      "learning_rate": 1.630703980532528e-06,
      "loss": 0.7425,
      "step": 241
    },
    {
      "epoch": 20.177777777777777,
      "grad_norm": 0.6427087187767029,
      "learning_rate": 1.5987691743739636e-06,
      "loss": 0.7761,
      "step": 242
    },
    {
      "epoch": 20.266666666666666,
      "grad_norm": 0.8927726745605469,
      "learning_rate": 1.5670025091120219e-06,
      "loss": 0.8807,
      "step": 243
    },
    {
      "epoch": 20.355555555555554,
      "grad_norm": 0.5097454786300659,
      "learning_rate": 1.5354099113921614e-06,
      "loss": 0.875,
      "step": 244
    },
    {
      "epoch": 20.444444444444443,
      "grad_norm": 0.5797455310821533,
      "learning_rate": 1.5039972753843966e-06,
      "loss": 0.8312,
      "step": 245
    },
    {
      "epoch": 20.533333333333335,
      "grad_norm": 0.7658204436302185,
      "learning_rate": 1.4727704616836297e-06,
      "loss": 0.8858,
      "step": 246
    },
    {
      "epoch": 20.622222222222224,
      "grad_norm": 0.7070634365081787,
      "learning_rate": 1.441735296216256e-06,
      "loss": 0.9683,
      "step": 247
    },
    {
      "epoch": 20.711111111111112,
      "grad_norm": 0.5789161920547485,
      "learning_rate": 1.4108975691532273e-06,
      "loss": 0.9102,
      "step": 248
    },
    {
      "epoch": 20.8,
      "grad_norm": 0.8189324736595154,
      "learning_rate": 1.3802630338297956e-06,
      "loss": 0.96,
      "step": 249
    },
    {
      "epoch": 20.88888888888889,
      "grad_norm": 0.7410324215888977,
      "learning_rate": 1.3498374056721198e-06,
      "loss": 0.9536,
      "step": 250
    },
    {
      "epoch": 20.977777777777778,
      "grad_norm": 0.9396767616271973,
      "learning_rate": 1.3196263611309539e-06,
      "loss": 0.8433,
      "step": 251
    },
    {
      "epoch": 21.0,
      "grad_norm": 0.7975627779960632,
      "learning_rate": 1.2896355366226e-06,
      "loss": 0.4761,
      "step": 252
    },
    {
      "epoch": 21.08888888888889,
      "grad_norm": 0.7357563972473145,
      "learning_rate": 1.2598705274773299e-06,
      "loss": 0.8871,
      "step": 253
    },
    {
      "epoch": 21.177777777777777,
      "grad_norm": 0.7067966461181641,
      "learning_rate": 1.2303368868954848e-06,
      "loss": 0.773,
      "step": 254
    },
    {
      "epoch": 21.266666666666666,
      "grad_norm": 0.593105137348175,
      "learning_rate": 1.2010401249114166e-06,
      "loss": 0.8273,
      "step": 255
    },
    {
      "epoch": 21.355555555555554,
      "grad_norm": 0.707051157951355,
      "learning_rate": 1.1719857073654923e-06,
      "loss": 0.8088,
      "step": 256
    },
    {
      "epoch": 21.444444444444443,
      "grad_norm": 0.7845866084098816,
      "learning_rate": 1.1431790548843464e-06,
      "loss": 0.9616,
      "step": 257
    },
    {
      "epoch": 21.533333333333335,
      "grad_norm": 0.481231689453125,
      "learning_rate": 1.1146255418695635e-06,
      "loss": 0.8371,
      "step": 258
    },
    {
      "epoch": 21.622222222222224,
      "grad_norm": 1.1225308179855347,
      "learning_rate": 1.0863304954949856e-06,
      "loss": 0.802,
      "step": 259
    },
    {
      "epoch": 21.711111111111112,
      "grad_norm": 0.6318144798278809,
      "learning_rate": 1.0582991947128324e-06,
      "loss": 0.8741,
      "step": 260
    },
    {
      "epoch": 21.8,
      "grad_norm": 0.7717880010604858,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 0.7535,
      "step": 261
    },
    {
      "epoch": 21.88888888888889,
      "grad_norm": 0.6893540024757385,
      "learning_rate": 1.0030486987264436e-06,
      "loss": 0.894,
      "step": 262
    },
    {
      "epoch": 21.977777777777778,
      "grad_norm": 0.6379507184028625,
      "learning_rate": 9.758398115006637e-07,
      "loss": 0.9623,
      "step": 263
    },
    {
      "epoch": 22.0,
      "grad_norm": 0.9665340781211853,
      "learning_rate": 9.489152839010799e-07,
      "loss": 0.9206,
      "step": 264
    },
    {
      "epoch": 22.08888888888889,
      "grad_norm": 0.6625165343284607,
      "learning_rate": 9.222801391848688e-07,
      "loss": 0.696,
      "step": 265
    },
    {
      "epoch": 22.177777777777777,
      "grad_norm": 0.6619503498077393,
      "learning_rate": 8.959393466195973e-07,
      "loss": 0.8825,
      "step": 266
    },
    {
      "epoch": 22.266666666666666,
      "grad_norm": 0.5958104133605957,
      "learning_rate": 8.69897820556124e-07,
      "loss": 0.9445,
      "step": 267
    },
    {
      "epoch": 22.355555555555554,
      "grad_norm": 0.7491523027420044,
      "learning_rate": 8.441604195117315e-07,
      "loss": 0.8024,
      "step": 268
    },
    {
      "epoch": 22.444444444444443,
      "grad_norm": 0.6927403211593628,
      "learning_rate": 8.187319452636821e-07,
      "loss": 0.7532,
      "step": 269
    },
    {
      "epoch": 22.533333333333335,
      "grad_norm": 0.7777316570281982,
      "learning_rate": 7.936171419533653e-07,
      "loss": 0.949,
      "step": 270
    },
    {
      "epoch": 22.622222222222224,
      "grad_norm": 0.5703297257423401,
      "learning_rate": 7.688206952011862e-07,
      "loss": 0.7882,
      "step": 271
    },
    {
      "epoch": 22.711111111111112,
      "grad_norm": 0.717693030834198,
      "learning_rate": 7.443472312323824e-07,
      "loss": 0.7254,
      "step": 272
    },
    {
      "epoch": 22.8,
      "grad_norm": 0.6906650066375732,
      "learning_rate": 7.202013160139159e-07,
      "loss": 1.1334,
      "step": 273
    },
    {
      "epoch": 22.88888888888889,
      "grad_norm": 0.7221674919128418,
      "learning_rate": 6.963874544026109e-07,
      "loss": 0.7708,
      "step": 274
    },
    {
      "epoch": 22.977777777777778,
      "grad_norm": 0.7417134642601013,
      "learning_rate": 6.729100893046897e-07,
      "loss": 0.8617,
      "step": 275
    },
    {
      "epoch": 23.0,
      "grad_norm": 0.7064130306243896,
      "learning_rate": 6.497736008468703e-07,
      "loss": 0.7606,
      "step": 276
    },
    {
      "epoch": 23.08888888888889,
      "grad_norm": 0.741130530834198,
      "learning_rate": 6.269823055591726e-07,
      "loss": 0.7765,
      "step": 277
    },
    {
      "epoch": 23.177777777777777,
      "grad_norm": 1.3605138063430786,
      "learning_rate": 6.045404555695935e-07,
      "loss": 0.907,
      "step": 278
    },
    {
      "epoch": 23.266666666666666,
      "grad_norm": 0.6725257635116577,
      "learning_rate": 5.824522378107936e-07,
      "loss": 1.0291,
      "step": 279
    },
    {
      "epoch": 23.355555555555554,
      "grad_norm": 0.5731009840965271,
      "learning_rate": 5.607217732389503e-07,
      "loss": 0.8876,
      "step": 280
    },
    {
      "epoch": 23.444444444444443,
      "grad_norm": 0.8626798391342163,
      "learning_rate": 5.393531160649221e-07,
      "loss": 0.9152,
      "step": 281
    },
    {
      "epoch": 23.533333333333335,
      "grad_norm": 0.6386983394622803,
      "learning_rate": 5.183502529978548e-07,
      "loss": 0.9826,
      "step": 282
    },
    {
      "epoch": 23.622222222222224,
      "grad_norm": 0.9047194719314575,
      "learning_rate": 4.977171025013961e-07,
      "loss": 0.7636,
      "step": 283
    },
    {
      "epoch": 23.711111111111112,
      "grad_norm": 0.671303927898407,
      "learning_rate": 4.774575140626317e-07,
      "loss": 0.6843,
      "step": 284
    },
    {
      "epoch": 23.8,
      "grad_norm": 0.5185788869857788,
      "learning_rate": 4.5757526747389506e-07,
      "loss": 0.8194,
      "step": 285
    },
    {
      "epoch": 23.88888888888889,
      "grad_norm": 0.673641562461853,
      "learning_rate": 4.380740721275786e-07,
      "loss": 0.8164,
      "step": 286
    },
    {
      "epoch": 23.977777777777778,
      "grad_norm": 0.7130348086357117,
      "learning_rate": 4.189575663240794e-07,
      "loss": 0.7278,
      "step": 287
    },
    {
      "epoch": 24.0,
      "grad_norm": 0.894011914730072,
      "learning_rate": 4.002293165930088e-07,
      "loss": 0.7292,
      "step": 288
    },
    {
      "epoch": 24.08888888888889,
      "grad_norm": 0.7084284424781799,
      "learning_rate": 3.818928170277911e-07,
      "loss": 0.6609,
      "step": 289
    },
    {
      "epoch": 24.177777777777777,
      "grad_norm": 0.7356826066970825,
      "learning_rate": 3.639514886337786e-07,
      "loss": 0.8054,
      "step": 290
    },
    {
      "epoch": 24.266666666666666,
      "grad_norm": 0.6582125425338745,
      "learning_rate": 3.4640867869000036e-07,
      "loss": 0.869,
      "step": 291
    },
    {
      "epoch": 24.355555555555554,
      "grad_norm": 0.6234636306762695,
      "learning_rate": 3.292676601246661e-07,
      "loss": 1.0296,
      "step": 292
    },
    {
      "epoch": 24.444444444444443,
      "grad_norm": 1.0210919380187988,
      "learning_rate": 3.125316309045434e-07,
      "loss": 0.9983,
      "step": 293
    },
    {
      "epoch": 24.533333333333335,
      "grad_norm": 0.5547788143157959,
      "learning_rate": 2.962037134383211e-07,
      "loss": 0.8998,
      "step": 294
    },
    {
      "epoch": 24.622222222222224,
      "grad_norm": 0.6771321296691895,
      "learning_rate": 2.80286953994062e-07,
      "loss": 0.805,
      "step": 295
    },
    {
      "epoch": 24.711111111111112,
      "grad_norm": 0.6345370411872864,
      "learning_rate": 2.647843221308721e-07,
      "loss": 0.6883,
      "step": 296
    },
    {
      "epoch": 24.8,
      "grad_norm": 0.6311068534851074,
      "learning_rate": 2.496987101448728e-07,
      "loss": 0.8913,
      "step": 297
    },
    {
      "epoch": 24.88888888888889,
      "grad_norm": 0.7895520925521851,
      "learning_rate": 2.3503293252959136e-07,
      "loss": 0.938,
      "step": 298
    },
    {
      "epoch": 24.977777777777778,
      "grad_norm": 0.7793102860450745,
      "learning_rate": 2.2078972545086647e-07,
      "loss": 0.8841,
      "step": 299
    },
    {
      "epoch": 25.0,
      "grad_norm": 0.6420133709907532,
      "learning_rate": 2.0697174623636795e-07,
      "loss": 0.556,
      "step": 300
    },
    {
      "epoch": 25.08888888888889,
      "grad_norm": 0.6089626550674438,
      "learning_rate": 1.9358157287982099e-07,
      "loss": 0.8709,
      "step": 301
    },
    {
      "epoch": 25.177777777777777,
      "grad_norm": 1.60710871219635,
      "learning_rate": 1.8062170356003854e-07,
      "loss": 0.6853,
      "step": 302
    },
    {
      "epoch": 25.266666666666666,
      "grad_norm": 0.678098738193512,
      "learning_rate": 1.680945561748412e-07,
      "loss": 0.995,
      "step": 303
    },
    {
      "epoch": 25.355555555555554,
      "grad_norm": 0.5670207738876343,
      "learning_rate": 1.5600246788994938e-07,
      "loss": 0.8167,
      "step": 304
    },
    {
      "epoch": 25.444444444444443,
      "grad_norm": 0.6996327638626099,
      "learning_rate": 1.44347694702949e-07,
      "loss": 0.9414,
      "step": 305
    },
    {
      "epoch": 25.533333333333335,
      "grad_norm": 0.7256758213043213,
      "learning_rate": 1.3313241102239056e-07,
      "loss": 0.9439,
      "step": 306
    },
    {
      "epoch": 25.622222222222224,
      "grad_norm": 0.5879709124565125,
      "learning_rate": 1.223587092621162e-07,
      "loss": 0.6697,
      "step": 307
    },
    {
      "epoch": 25.711111111111112,
      "grad_norm": 0.7841221690177917,
      "learning_rate": 1.120285994508799e-07,
      "loss": 0.9173,
      "step": 308
    },
    {
      "epoch": 25.8,
      "grad_norm": 0.6498060822486877,
      "learning_rate": 1.0214400885734194e-07,
      "loss": 0.8086,
      "step": 309
    },
    {
      "epoch": 25.88888888888889,
      "grad_norm": 0.740508496761322,
      "learning_rate": 9.270678163050218e-08,
      "loss": 0.8424,
      "step": 310
    },
    {
      "epoch": 25.977777777777778,
      "grad_norm": 0.6314177513122559,
      "learning_rate": 8.371867845563819e-08,
      "loss": 0.8007,
      "step": 311
    },
    {
      "epoch": 26.0,
      "grad_norm": 0.8741561770439148,
      "learning_rate": 7.518137622582189e-08,
      "loss": 0.8582,
      "step": 312
    },
    {
      "epoch": 26.08888888888889,
      "grad_norm": 0.7642889022827148,
      "learning_rate": 6.70964677290617e-08,
      "loss": 0.767,
      "step": 313
    },
    {
      "epoch": 26.177777777777777,
      "grad_norm": 0.5676147937774658,
      "learning_rate": 5.946546135113862e-08,
      "loss": 0.8953,
      "step": 314
    },
    {
      "epoch": 26.266666666666666,
      "grad_norm": 0.6517917513847351,
      "learning_rate": 5.2289780794192726e-08,
      "loss": 0.7334,
      "step": 315
    },
    {
      "epoch": 26.355555555555554,
      "grad_norm": 0.8068559169769287,
      "learning_rate": 4.557076481110367e-08,
      "loss": 0.9373,
      "step": 316
    },
    {
      "epoch": 26.444444444444443,
      "grad_norm": 0.5517790913581848,
      "learning_rate": 3.930966695572136e-08,
      "loss": 0.884,
      "step": 317
    },
    {
      "epoch": 26.533333333333335,
      "grad_norm": 0.6840013861656189,
      "learning_rate": 3.3507655348995194e-08,
      "loss": 0.7411,
      "step": 318
    },
    {
      "epoch": 26.622222222222224,
      "grad_norm": 1.3431508541107178,
      "learning_rate": 2.8165812461038166e-08,
      "loss": 0.7748,
      "step": 319
    },
    {
      "epoch": 26.711111111111112,
      "grad_norm": 0.7526693940162659,
      "learning_rate": 2.3285134909173113e-08,
      "loss": 0.8678,
      "step": 320
    },
    {
      "epoch": 26.8,
      "grad_norm": 0.7261110544204712,
      "learning_rate": 1.886653327199617e-08,
      "loss": 0.8908,
      "step": 321
    },
    {
      "epoch": 26.88888888888889,
      "grad_norm": 0.6295614242553711,
      "learning_rate": 1.4910831919490997e-08,
      "loss": 0.8878,
      "step": 322
    },
    {
      "epoch": 26.977777777777778,
      "grad_norm": 0.8621529936790466,
      "learning_rate": 1.1418768859227935e-08,
      "loss": 0.895,
      "step": 323
    },
    {
      "epoch": 27.0,
      "grad_norm": 0.7991691827774048,
      "learning_rate": 8.390995598676067e-09,
      "loss": 0.802,
      "step": 324
    },
    {
      "epoch": 27.08888888888889,
      "grad_norm": 0.551089882850647,
      "learning_rate": 5.828077023651846e-09,
      "loss": 0.6027,
      "step": 325
    },
    {
      "epoch": 27.177777777777777,
      "grad_norm": 0.6393262147903442,
      "learning_rate": 3.730491292930072e-09,
      "loss": 0.7449,
      "step": 326
    },
    {
      "epoch": 27.266666666666666,
      "grad_norm": 0.7486969232559204,
      "learning_rate": 2.0986297490338536e-09,
      "loss": 0.9378,
      "step": 327
    },
    {
      "epoch": 27.355555555555554,
      "grad_norm": 0.5878354907035828,
      "learning_rate": 9.32796845223294e-10,
      "loss": 0.8487,
      "step": 328
    },
    {
      "epoch": 27.444444444444443,
      "grad_norm": 0.7258017659187317,
      "learning_rate": 2.3321008869481296e-10,
      "loss": 0.9618,
      "step": 329
    },
    {
      "epoch": 27.533333333333335,
      "grad_norm": 0.6953983902931213,
      "learning_rate": 0.0,
      "loss": 1.0057,
      "step": 330
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 330,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 30,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.1131745240245862e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}