kodcode-v1-fixed / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.975903614457831,
"eval_steps": 500,
"global_step": 162,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.03614457831325301,
"grad_norm": 2.8209025859832764,
"learning_rate": 5.882352941176471e-07,
"loss": 0.7616,
"step": 1
},
{
"epoch": 0.07228915662650602,
"grad_norm": 2.7777888774871826,
"learning_rate": 1.1764705882352942e-06,
"loss": 0.7427,
"step": 2
},
{
"epoch": 0.10843373493975904,
"grad_norm": 2.813636064529419,
"learning_rate": 1.7647058823529414e-06,
"loss": 0.744,
"step": 3
},
{
"epoch": 0.14457831325301204,
"grad_norm": 2.675807476043701,
"learning_rate": 2.3529411764705885e-06,
"loss": 0.7432,
"step": 4
},
{
"epoch": 0.18072289156626506,
"grad_norm": 2.662332773208618,
"learning_rate": 2.9411764705882355e-06,
"loss": 0.7608,
"step": 5
},
{
"epoch": 0.21686746987951808,
"grad_norm": 2.7628424167633057,
"learning_rate": 3.529411764705883e-06,
"loss": 0.7493,
"step": 6
},
{
"epoch": 0.25301204819277107,
"grad_norm": 2.4826860427856445,
"learning_rate": 4.11764705882353e-06,
"loss": 0.7395,
"step": 7
},
{
"epoch": 0.2891566265060241,
"grad_norm": 2.5429294109344482,
"learning_rate": 4.705882352941177e-06,
"loss": 0.7363,
"step": 8
},
{
"epoch": 0.3253012048192771,
"grad_norm": 1.8845857381820679,
"learning_rate": 5.294117647058824e-06,
"loss": 0.7224,
"step": 9
},
{
"epoch": 0.3614457831325301,
"grad_norm": 1.3661439418792725,
"learning_rate": 5.882352941176471e-06,
"loss": 0.7079,
"step": 10
},
{
"epoch": 0.39759036144578314,
"grad_norm": 1.1115751266479492,
"learning_rate": 6.470588235294119e-06,
"loss": 0.6976,
"step": 11
},
{
"epoch": 0.43373493975903615,
"grad_norm": 0.9719029068946838,
"learning_rate": 7.058823529411766e-06,
"loss": 0.6877,
"step": 12
},
{
"epoch": 0.46987951807228917,
"grad_norm": 1.5337737798690796,
"learning_rate": 7.647058823529411e-06,
"loss": 0.6651,
"step": 13
},
{
"epoch": 0.5060240963855421,
"grad_norm": 1.6938939094543457,
"learning_rate": 8.23529411764706e-06,
"loss": 0.6765,
"step": 14
},
{
"epoch": 0.5421686746987951,
"grad_norm": 1.6760753393173218,
"learning_rate": 8.823529411764707e-06,
"loss": 0.6804,
"step": 15
},
{
"epoch": 0.5783132530120482,
"grad_norm": 1.392161250114441,
"learning_rate": 9.411764705882354e-06,
"loss": 0.6565,
"step": 16
},
{
"epoch": 0.6144578313253012,
"grad_norm": 1.2017204761505127,
"learning_rate": 1e-05,
"loss": 0.6346,
"step": 17
},
{
"epoch": 0.6506024096385542,
"grad_norm": 1.2758034467697144,
"learning_rate": 9.99882649009242e-06,
"loss": 0.669,
"step": 18
},
{
"epoch": 0.6867469879518072,
"grad_norm": 0.9672208428382874,
"learning_rate": 9.995306511219885e-06,
"loss": 0.6494,
"step": 19
},
{
"epoch": 0.7228915662650602,
"grad_norm": 0.7265082597732544,
"learning_rate": 9.989441715674422e-06,
"loss": 0.6397,
"step": 20
},
{
"epoch": 0.7590361445783133,
"grad_norm": 0.605043351650238,
"learning_rate": 9.981234856414306e-06,
"loss": 0.645,
"step": 21
},
{
"epoch": 0.7951807228915663,
"grad_norm": 0.6995558142662048,
"learning_rate": 9.970689785771798e-06,
"loss": 0.6516,
"step": 22
},
{
"epoch": 0.8313253012048193,
"grad_norm": 0.7498184442520142,
"learning_rate": 9.957811453644848e-06,
"loss": 0.6296,
"step": 23
},
{
"epoch": 0.8674698795180723,
"grad_norm": 0.7003129124641418,
"learning_rate": 9.942605905173593e-06,
"loss": 0.6335,
"step": 24
},
{
"epoch": 0.9036144578313253,
"grad_norm": 0.721664309501648,
"learning_rate": 9.925080277902743e-06,
"loss": 0.641,
"step": 25
},
{
"epoch": 0.9397590361445783,
"grad_norm": 0.5983788371086121,
"learning_rate": 9.905242798431196e-06,
"loss": 0.6236,
"step": 26
},
{
"epoch": 0.9759036144578314,
"grad_norm": 0.5488296747207642,
"learning_rate": 9.883102778550434e-06,
"loss": 0.6434,
"step": 27
},
{
"epoch": 1.036144578313253,
"grad_norm": 1.0052872896194458,
"learning_rate": 9.858670610873528e-06,
"loss": 1.2338,
"step": 28
},
{
"epoch": 1.072289156626506,
"grad_norm": 0.4585392475128174,
"learning_rate": 9.831957763956814e-06,
"loss": 0.5655,
"step": 29
},
{
"epoch": 1.108433734939759,
"grad_norm": 0.5069596767425537,
"learning_rate": 9.802976776916493e-06,
"loss": 0.5669,
"step": 30
},
{
"epoch": 1.144578313253012,
"grad_norm": 0.5148389935493469,
"learning_rate": 9.771741253542742e-06,
"loss": 0.5765,
"step": 31
},
{
"epoch": 1.180722891566265,
"grad_norm": 0.5303863883018494,
"learning_rate": 9.738265855914014e-06,
"loss": 0.5701,
"step": 32
},
{
"epoch": 1.216867469879518,
"grad_norm": 0.5023403763771057,
"learning_rate": 9.70256629751462e-06,
"loss": 0.5825,
"step": 33
},
{
"epoch": 1.2530120481927711,
"grad_norm": 0.37979233264923096,
"learning_rate": 9.664659335858755e-06,
"loss": 0.5575,
"step": 34
},
{
"epoch": 1.2891566265060241,
"grad_norm": 0.46085426211357117,
"learning_rate": 9.624562764624445e-06,
"loss": 0.5377,
"step": 35
},
{
"epoch": 1.3253012048192772,
"grad_norm": 0.5160898566246033,
"learning_rate": 9.582295405301131e-06,
"loss": 0.5801,
"step": 36
},
{
"epoch": 1.3614457831325302,
"grad_norm": 0.4731687903404236,
"learning_rate": 9.537877098354787e-06,
"loss": 0.5462,
"step": 37
},
{
"epoch": 1.3975903614457832,
"grad_norm": 0.37070778012275696,
"learning_rate": 9.491328693914723e-06,
"loss": 0.5689,
"step": 38
},
{
"epoch": 1.4337349397590362,
"grad_norm": 0.3897731900215149,
"learning_rate": 9.442672041986456e-06,
"loss": 0.5429,
"step": 39
},
{
"epoch": 1.4698795180722892,
"grad_norm": 0.3995729684829712,
"learning_rate": 9.391929982195233e-06,
"loss": 0.5554,
"step": 40
},
{
"epoch": 1.5060240963855422,
"grad_norm": 0.4201774299144745,
"learning_rate": 9.339126333065008e-06,
"loss": 0.5513,
"step": 41
},
{
"epoch": 1.5421686746987953,
"grad_norm": 0.4044627249240875,
"learning_rate": 9.284285880837947e-06,
"loss": 0.5419,
"step": 42
},
{
"epoch": 1.5783132530120483,
"grad_norm": 0.38670021295547485,
"learning_rate": 9.22743436783966e-06,
"loss": 0.5504,
"step": 43
},
{
"epoch": 1.6144578313253013,
"grad_norm": 0.3694572150707245,
"learning_rate": 9.168598480395653e-06,
"loss": 0.5435,
"step": 44
},
{
"epoch": 1.6506024096385543,
"grad_norm": 0.37926754355430603,
"learning_rate": 9.107805836304658e-06,
"loss": 0.5437,
"step": 45
},
{
"epoch": 1.6867469879518073,
"grad_norm": 0.33715763688087463,
"learning_rate": 9.045084971874738e-06,
"loss": 0.5485,
"step": 46
},
{
"epoch": 1.7228915662650603,
"grad_norm": 0.373822420835495,
"learning_rate": 8.98046532852822e-06,
"loss": 0.5498,
"step": 47
},
{
"epoch": 1.7590361445783134,
"grad_norm": 0.37885019183158875,
"learning_rate": 8.91397723898178e-06,
"loss": 0.54,
"step": 48
},
{
"epoch": 1.7951807228915664,
"grad_norm": 0.3432829976081848,
"learning_rate": 8.845651913008145e-06,
"loss": 0.5558,
"step": 49
},
{
"epoch": 1.8313253012048194,
"grad_norm": 0.34714093804359436,
"learning_rate": 8.775521422786104e-06,
"loss": 0.5356,
"step": 50
},
{
"epoch": 1.8674698795180724,
"grad_norm": 0.34533223509788513,
"learning_rate": 8.703618687845697e-06,
"loss": 0.532,
"step": 51
},
{
"epoch": 1.9036144578313254,
"grad_norm": 0.33921247720718384,
"learning_rate": 8.629977459615655e-06,
"loss": 0.5414,
"step": 52
},
{
"epoch": 1.9397590361445785,
"grad_norm": 0.3631800413131714,
"learning_rate": 8.554632305580355e-06,
"loss": 0.5625,
"step": 53
},
{
"epoch": 1.9759036144578315,
"grad_norm": 0.3610064685344696,
"learning_rate": 8.477618593053693e-06,
"loss": 0.5483,
"step": 54
},
{
"epoch": 2.036144578313253,
"grad_norm": 0.6465901136398315,
"learning_rate": 8.39897247257754e-06,
"loss": 0.9698,
"step": 55
},
{
"epoch": 2.072289156626506,
"grad_norm": 0.4273433983325958,
"learning_rate": 8.318730860952523e-06,
"loss": 0.4964,
"step": 56
},
{
"epoch": 2.108433734939759,
"grad_norm": 0.3399488031864166,
"learning_rate": 8.23693142390914e-06,
"loss": 0.468,
"step": 57
},
{
"epoch": 2.144578313253012,
"grad_norm": 0.36461561918258667,
"learning_rate": 8.153612558427311e-06,
"loss": 0.4674,
"step": 58
},
{
"epoch": 2.180722891566265,
"grad_norm": 0.41741812229156494,
"learning_rate": 8.068813374712689e-06,
"loss": 0.46,
"step": 59
},
{
"epoch": 2.216867469879518,
"grad_norm": 0.539730966091156,
"learning_rate": 7.982573677838172e-06,
"loss": 0.4591,
"step": 60
},
{
"epoch": 2.253012048192771,
"grad_norm": 0.3364892303943634,
"learning_rate": 7.894933949059245e-06,
"loss": 0.4656,
"step": 61
},
{
"epoch": 2.289156626506024,
"grad_norm": 0.3342113196849823,
"learning_rate": 7.805935326811913e-06,
"loss": 0.4669,
"step": 62
},
{
"epoch": 2.325301204819277,
"grad_norm": 0.4477402865886688,
"learning_rate": 7.715619587402165e-06,
"loss": 0.4774,
"step": 63
},
{
"epoch": 2.36144578313253,
"grad_norm": 0.4410865902900696,
"learning_rate": 7.624029125396004e-06,
"loss": 0.4386,
"step": 64
},
{
"epoch": 2.397590361445783,
"grad_norm": 0.3660714328289032,
"learning_rate": 7.53120693371927e-06,
"loss": 0.4412,
"step": 65
},
{
"epoch": 2.433734939759036,
"grad_norm": 0.3396703898906708,
"learning_rate": 7.437196583476597e-06,
"loss": 0.4442,
"step": 66
},
{
"epoch": 2.4698795180722892,
"grad_norm": 0.352255254983902,
"learning_rate": 7.342042203498952e-06,
"loss": 0.4664,
"step": 67
},
{
"epoch": 2.5060240963855422,
"grad_norm": 0.39523279666900635,
"learning_rate": 7.245788459629397e-06,
"loss": 0.4386,
"step": 68
},
{
"epoch": 2.5421686746987953,
"grad_norm": 0.42037248611450195,
"learning_rate": 7.148480533756759e-06,
"loss": 0.4406,
"step": 69
},
{
"epoch": 2.5783132530120483,
"grad_norm": 0.3557772636413574,
"learning_rate": 7.050164102607081e-06,
"loss": 0.4568,
"step": 70
},
{
"epoch": 2.6144578313253013,
"grad_norm": 0.3083244562149048,
"learning_rate": 6.950885316302773e-06,
"loss": 0.4193,
"step": 71
},
{
"epoch": 2.6506024096385543,
"grad_norm": 0.3360963463783264,
"learning_rate": 6.850690776699574e-06,
"loss": 0.4265,
"step": 72
},
{
"epoch": 2.6867469879518073,
"grad_norm": 0.3748219907283783,
"learning_rate": 6.749627515511443e-06,
"loss": 0.4544,
"step": 73
},
{
"epoch": 2.7228915662650603,
"grad_norm": 0.3778473436832428,
"learning_rate": 6.647742972233703e-06,
"loss": 0.435,
"step": 74
},
{
"epoch": 2.7590361445783134,
"grad_norm": 0.3551197946071625,
"learning_rate": 6.545084971874738e-06,
"loss": 0.4466,
"step": 75
},
{
"epoch": 2.7951807228915664,
"grad_norm": 0.35094714164733887,
"learning_rate": 6.441701702506755e-06,
"loss": 0.4189,
"step": 76
},
{
"epoch": 2.8313253012048194,
"grad_norm": 0.32956451177597046,
"learning_rate": 6.337641692646106e-06,
"loss": 0.419,
"step": 77
},
{
"epoch": 2.8674698795180724,
"grad_norm": 0.36045852303504944,
"learning_rate": 6.2329537884738115e-06,
"loss": 0.4361,
"step": 78
},
{
"epoch": 2.9036144578313254,
"grad_norm": 0.3273407816886902,
"learning_rate": 6.127687130906972e-06,
"loss": 0.4588,
"step": 79
},
{
"epoch": 2.9397590361445785,
"grad_norm": 0.3656028211116791,
"learning_rate": 6.021891132531825e-06,
"loss": 0.4462,
"step": 80
},
{
"epoch": 2.9759036144578315,
"grad_norm": 0.35299059748649597,
"learning_rate": 5.915615454409281e-06,
"loss": 0.4599,
"step": 81
},
{
"epoch": 3.036144578313253,
"grad_norm": 0.5190630555152893,
"learning_rate": 5.808909982763825e-06,
"loss": 0.8681,
"step": 82
},
{
"epoch": 3.072289156626506,
"grad_norm": 0.40097540616989136,
"learning_rate": 5.701824805566722e-06,
"loss": 0.387,
"step": 83
},
{
"epoch": 3.108433734939759,
"grad_norm": 0.37181419134140015,
"learning_rate": 5.594410189024533e-06,
"loss": 0.3973,
"step": 84
},
{
"epoch": 3.144578313253012,
"grad_norm": 0.36105361580848694,
"learning_rate": 5.4867165539839505e-06,
"loss": 0.3706,
"step": 85
},
{
"epoch": 3.180722891566265,
"grad_norm": 0.32242706418037415,
"learning_rate": 5.378794452264053e-06,
"loss": 0.3647,
"step": 86
},
{
"epoch": 3.216867469879518,
"grad_norm": 0.37879207730293274,
"learning_rate": 5.270694542927089e-06,
"loss": 0.3568,
"step": 87
},
{
"epoch": 3.253012048192771,
"grad_norm": 0.3197579085826874,
"learning_rate": 5.1624675684989035e-06,
"loss": 0.3512,
"step": 88
},
{
"epoch": 3.289156626506024,
"grad_norm": 0.3562887907028198,
"learning_rate": 5.054164331150199e-06,
"loss": 0.3561,
"step": 89
},
{
"epoch": 3.325301204819277,
"grad_norm": 0.343924343585968,
"learning_rate": 4.945835668849801e-06,
"loss": 0.358,
"step": 90
},
{
"epoch": 3.36144578313253,
"grad_norm": 0.35644856095314026,
"learning_rate": 4.837532431501098e-06,
"loss": 0.3673,
"step": 91
},
{
"epoch": 3.397590361445783,
"grad_norm": 0.4029666781425476,
"learning_rate": 4.729305457072913e-06,
"loss": 0.3534,
"step": 92
},
{
"epoch": 3.433734939759036,
"grad_norm": 0.3742657005786896,
"learning_rate": 4.621205547735949e-06,
"loss": 0.3623,
"step": 93
},
{
"epoch": 3.4698795180722892,
"grad_norm": 0.3467872142791748,
"learning_rate": 4.513283446016052e-06,
"loss": 0.3408,
"step": 94
},
{
"epoch": 3.5060240963855422,
"grad_norm": 0.36237695813179016,
"learning_rate": 4.4055898109754684e-06,
"loss": 0.3612,
"step": 95
},
{
"epoch": 3.5421686746987953,
"grad_norm": 0.3537854254245758,
"learning_rate": 4.298175194433279e-06,
"loss": 0.3696,
"step": 96
},
{
"epoch": 3.5783132530120483,
"grad_norm": 0.35789230465888977,
"learning_rate": 4.191090017236177e-06,
"loss": 0.3652,
"step": 97
},
{
"epoch": 3.6144578313253013,
"grad_norm": 0.34432247281074524,
"learning_rate": 4.0843845455907195e-06,
"loss": 0.3422,
"step": 98
},
{
"epoch": 3.6506024096385543,
"grad_norm": 0.36375707387924194,
"learning_rate": 3.9781088674681764e-06,
"loss": 0.3717,
"step": 99
},
{
"epoch": 3.6867469879518073,
"grad_norm": 0.34923499822616577,
"learning_rate": 3.87231286909303e-06,
"loss": 0.3436,
"step": 100
},
{
"epoch": 3.7228915662650603,
"grad_norm": 0.35821905732154846,
"learning_rate": 3.767046211526191e-06,
"loss": 0.3584,
"step": 101
},
{
"epoch": 3.7590361445783134,
"grad_norm": 0.3310084640979767,
"learning_rate": 3.662358307353897e-06,
"loss": 0.3751,
"step": 102
},
{
"epoch": 3.7951807228915664,
"grad_norm": 0.3326928913593292,
"learning_rate": 3.5582982974932467e-06,
"loss": 0.3578,
"step": 103
},
{
"epoch": 3.8313253012048194,
"grad_norm": 0.3595709502696991,
"learning_rate": 3.4549150281252635e-06,
"loss": 0.3504,
"step": 104
},
{
"epoch": 3.8674698795180724,
"grad_norm": 0.3628636598587036,
"learning_rate": 3.3522570277662986e-06,
"loss": 0.3371,
"step": 105
},
{
"epoch": 3.9036144578313254,
"grad_norm": 0.2951819896697998,
"learning_rate": 3.250372484488558e-06,
"loss": 0.3565,
"step": 106
},
{
"epoch": 3.9397590361445785,
"grad_norm": 0.3485693037509918,
"learning_rate": 3.149309223300428e-06,
"loss": 0.3649,
"step": 107
},
{
"epoch": 3.9759036144578315,
"grad_norm": 0.3397277891635895,
"learning_rate": 3.0491146836972273e-06,
"loss": 0.3413,
"step": 108
},
{
"epoch": 4.036144578313253,
"grad_norm": 0.5771328210830688,
"learning_rate": 2.9498358973929197e-06,
"loss": 0.6546,
"step": 109
},
{
"epoch": 4.072289156626506,
"grad_norm": 0.377215176820755,
"learning_rate": 2.8515194662432423e-06,
"loss": 0.32,
"step": 110
},
{
"epoch": 4.108433734939759,
"grad_norm": 0.39817994832992554,
"learning_rate": 2.7542115403706067e-06,
"loss": 0.3009,
"step": 111
},
{
"epoch": 4.144578313253012,
"grad_norm": 0.3676982820034027,
"learning_rate": 2.65795779650105e-06,
"loss": 0.2888,
"step": 112
},
{
"epoch": 4.180722891566265,
"grad_norm": 0.34246042370796204,
"learning_rate": 2.562803416523405e-06,
"loss": 0.3289,
"step": 113
},
{
"epoch": 4.216867469879518,
"grad_norm": 0.3002203702926636,
"learning_rate": 2.46879306628073e-06,
"loss": 0.2918,
"step": 114
},
{
"epoch": 4.253012048192771,
"grad_norm": 0.3316577672958374,
"learning_rate": 2.375970874603998e-06,
"loss": 0.2918,
"step": 115
},
{
"epoch": 4.289156626506024,
"grad_norm": 0.36060383915901184,
"learning_rate": 2.2843804125978356e-06,
"loss": 0.3083,
"step": 116
},
{
"epoch": 4.325301204819277,
"grad_norm": 0.3468538522720337,
"learning_rate": 2.1940646731880887e-06,
"loss": 0.2965,
"step": 117
},
{
"epoch": 4.36144578313253,
"grad_norm": 0.33555057644844055,
"learning_rate": 2.105066050940758e-06,
"loss": 0.2753,
"step": 118
},
{
"epoch": 4.397590361445783,
"grad_norm": 0.3662334680557251,
"learning_rate": 2.0174263221618307e-06,
"loss": 0.3069,
"step": 119
},
{
"epoch": 4.433734939759036,
"grad_norm": 0.3135122060775757,
"learning_rate": 1.931186625287313e-06,
"loss": 0.2963,
"step": 120
},
{
"epoch": 4.469879518072289,
"grad_norm": 0.3595134913921356,
"learning_rate": 1.8463874415726918e-06,
"loss": 0.2941,
"step": 121
},
{
"epoch": 4.506024096385542,
"grad_norm": 0.3410852551460266,
"learning_rate": 1.7630685760908623e-06,
"loss": 0.2933,
"step": 122
},
{
"epoch": 4.542168674698795,
"grad_norm": 0.3322881758213043,
"learning_rate": 1.6812691390474788e-06,
"loss": 0.312,
"step": 123
},
{
"epoch": 4.578313253012048,
"grad_norm": 0.3299103379249573,
"learning_rate": 1.6010275274224607e-06,
"loss": 0.2687,
"step": 124
},
{
"epoch": 4.614457831325301,
"grad_norm": 0.3352324366569519,
"learning_rate": 1.5223814069463077e-06,
"loss": 0.2856,
"step": 125
},
{
"epoch": 4.650602409638554,
"grad_norm": 0.3284769356250763,
"learning_rate": 1.4453676944196477e-06,
"loss": 0.2624,
"step": 126
},
{
"epoch": 4.686746987951807,
"grad_norm": 0.2953889071941376,
"learning_rate": 1.370022540384347e-06,
"loss": 0.295,
"step": 127
},
{
"epoch": 4.72289156626506,
"grad_norm": 0.30767425894737244,
"learning_rate": 1.296381312154305e-06,
"loss": 0.2872,
"step": 128
},
{
"epoch": 4.759036144578313,
"grad_norm": 0.3192180395126343,
"learning_rate": 1.2244785772138972e-06,
"loss": 0.289,
"step": 129
},
{
"epoch": 4.795180722891566,
"grad_norm": 0.3239184319972992,
"learning_rate": 1.1543480869918555e-06,
"loss": 0.2843,
"step": 130
},
{
"epoch": 4.831325301204819,
"grad_norm": 0.34262704849243164,
"learning_rate": 1.0860227610182222e-06,
"loss": 0.2841,
"step": 131
},
{
"epoch": 4.867469879518072,
"grad_norm": 0.3087993562221527,
"learning_rate": 1.0195346714717813e-06,
"loss": 0.2949,
"step": 132
},
{
"epoch": 4.903614457831325,
"grad_norm": 0.3130486011505127,
"learning_rate": 9.549150281252633e-07,
"loss": 0.2935,
"step": 133
},
{
"epoch": 4.9397590361445785,
"grad_norm": 0.34729984402656555,
"learning_rate": 8.921941636953435e-07,
"loss": 0.2852,
"step": 134
},
{
"epoch": 4.975903614457831,
"grad_norm": 0.2952175736427307,
"learning_rate": 8.314015196043501e-07,
"loss": 0.3026,
"step": 135
},
{
"epoch": 5.036144578313253,
"grad_norm": 0.33559635281562805,
"learning_rate": 7.725656321603414e-07,
"loss": 0.5577,
"step": 136
},
{
"epoch": 5.072289156626506,
"grad_norm": 0.30693137645721436,
"learning_rate": 7.157141191620548e-07,
"loss": 0.2559,
"step": 137
},
{
"epoch": 5.108433734939759,
"grad_norm": 0.3629642724990845,
"learning_rate": 6.60873666934993e-07,
"loss": 0.2612,
"step": 138
},
{
"epoch": 5.144578313253012,
"grad_norm": 0.36669614911079407,
"learning_rate": 6.080700178047688e-07,
"loss": 0.2997,
"step": 139
},
{
"epoch": 5.180722891566265,
"grad_norm": 0.314728707075119,
"learning_rate": 5.573279580135438e-07,
"loss": 0.2893,
"step": 140
},
{
"epoch": 5.216867469879518,
"grad_norm": 0.29658058285713196,
"learning_rate": 5.086713060852788e-07,
"loss": 0.2676,
"step": 141
},
{
"epoch": 5.253012048192771,
"grad_norm": 0.33411288261413574,
"learning_rate": 4.6212290164521554e-07,
"loss": 0.2433,
"step": 142
},
{
"epoch": 5.289156626506024,
"grad_norm": 0.3197992742061615,
"learning_rate": 4.1770459469887003e-07,
"loss": 0.2898,
"step": 143
},
{
"epoch": 5.325301204819277,
"grad_norm": 0.30660751461982727,
"learning_rate": 3.754372353755559e-07,
"loss": 0.2554,
"step": 144
},
{
"epoch": 5.36144578313253,
"grad_norm": 0.2993546724319458,
"learning_rate": 3.35340664141246e-07,
"loss": 0.2363,
"step": 145
},
{
"epoch": 5.397590361445783,
"grad_norm": 0.3170936107635498,
"learning_rate": 2.974337024853802e-07,
"loss": 0.2471,
"step": 146
},
{
"epoch": 5.433734939759036,
"grad_norm": 0.3209405541419983,
"learning_rate": 2.617341440859883e-07,
"loss": 0.2399,
"step": 147
},
{
"epoch": 5.469879518072289,
"grad_norm": 0.29624640941619873,
"learning_rate": 2.2825874645725942e-07,
"loss": 0.2651,
"step": 148
},
{
"epoch": 5.506024096385542,
"grad_norm": 0.30279141664505005,
"learning_rate": 1.9702322308350675e-07,
"loss": 0.2525,
"step": 149
},
{
"epoch": 5.542168674698795,
"grad_norm": 0.3102453351020813,
"learning_rate": 1.6804223604318825e-07,
"loss": 0.2871,
"step": 150
},
{
"epoch": 5.578313253012048,
"grad_norm": 0.27690696716308594,
"learning_rate": 1.413293891264722e-07,
"loss": 0.242,
"step": 151
},
{
"epoch": 5.614457831325301,
"grad_norm": 0.2901451885700226,
"learning_rate": 1.1689722144956672e-07,
"loss": 0.2704,
"step": 152
},
{
"epoch": 5.650602409638554,
"grad_norm": 0.3027832508087158,
"learning_rate": 9.475720156880419e-08,
"loss": 0.2681,
"step": 153
},
{
"epoch": 5.686746987951807,
"grad_norm": 0.3067132830619812,
"learning_rate": 7.491972209725807e-08,
"loss": 0.2789,
"step": 154
},
{
"epoch": 5.72289156626506,
"grad_norm": 0.2636028826236725,
"learning_rate": 5.739409482640956e-08,
"loss": 0.239,
"step": 155
},
{
"epoch": 5.759036144578313,
"grad_norm": 0.2763376533985138,
"learning_rate": 4.2188546355153016e-08,
"loss": 0.2624,
"step": 156
},
{
"epoch": 5.795180722891566,
"grad_norm": 0.2844434082508087,
"learning_rate": 2.9310214228202016e-08,
"loss": 0.2616,
"step": 157
},
{
"epoch": 5.831325301204819,
"grad_norm": 0.30939143896102905,
"learning_rate": 1.8765143585693924e-08,
"loss": 0.2651,
"step": 158
},
{
"epoch": 5.867469879518072,
"grad_norm": 0.2921662926673889,
"learning_rate": 1.0558284325578038e-08,
"loss": 0.2655,
"step": 159
},
{
"epoch": 5.903614457831325,
"grad_norm": 0.2594563364982605,
"learning_rate": 4.69348878011644e-09,
"loss": 0.2887,
"step": 160
},
{
"epoch": 5.9397590361445785,
"grad_norm": 0.2833521068096161,
"learning_rate": 1.173509907579362e-09,
"loss": 0.278,
"step": 161
},
{
"epoch": 5.975903614457831,
"grad_norm": 0.2993119955062866,
"learning_rate": 0.0,
"loss": 0.2823,
"step": 162
},
{
"epoch": 5.975903614457831,
"step": 162,
"total_flos": 212201928261632.0,
"train_loss": 0.4494376038143664,
"train_runtime": 18091.5652,
"train_samples_per_second": 0.439,
"train_steps_per_second": 0.009
}
],
"logging_steps": 1,
"max_steps": 162,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 28,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 212201928261632.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
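
For reference, the log_history above can be read back with standard tooling. Below is a minimal sketch, assuming the file is saved locally as trainer_state.json; the plotting layout is an illustration and not part of the checkpoint itself.

import json

import matplotlib.pyplot as plt

# Load the trainer state written by the HF Trainer (path is an assumption).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step logging entries; the final summary entry
# (train_loss, train_runtime, ...) has no "loss"/"learning_rate" keys.
records = [r for r in state["log_history"] if "loss" in r and "learning_rate" in r]

steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]
lrs = [r["learning_rate"] for r in records]

# Two stacked panels: training loss on top, learning-rate schedule below.
fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("train loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
plt.tight_layout()
plt.show()

The learning_rate column is consistent with a linear warmup over the first 17 steps (up to 1e-05) followed by a cosine decay to 0.0 at step 162.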