{
  "best_metric": 0.42480990290641785,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.4241781548250265,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0042417815482502655,
      "grad_norm": 2.440074920654297,
      "learning_rate": 1.6666666666666668e-07,
      "loss": 0.3247,
      "step": 1
    },
    {
      "epoch": 0.0042417815482502655,
      "eval_loss": 2.892012119293213,
      "eval_runtime": 8.3575,
      "eval_samples_per_second": 47.502,
      "eval_steps_per_second": 5.983,
      "step": 1
    },
    {
      "epoch": 0.008483563096500531,
      "grad_norm": 3.9008901119232178,
      "learning_rate": 3.3333333333333335e-07,
      "loss": 0.4604,
      "step": 2
    },
    {
      "epoch": 0.012725344644750796,
      "grad_norm": 3.8679404258728027,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.5737,
      "step": 3
    },
    {
      "epoch": 0.016967126193001062,
      "grad_norm": 4.003969192504883,
      "learning_rate": 6.666666666666667e-07,
      "loss": 0.5677,
      "step": 4
    },
    {
      "epoch": 0.021208907741251327,
      "grad_norm": 5.167972087860107,
      "learning_rate": 8.333333333333333e-07,
      "loss": 0.6147,
      "step": 5
    },
    {
      "epoch": 0.02545068928950159,
      "grad_norm": 4.082551956176758,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.5965,
      "step": 6
    },
    {
      "epoch": 0.029692470837751856,
      "grad_norm": 4.586578369140625,
      "learning_rate": 1.1666666666666668e-06,
      "loss": 0.646,
      "step": 7
    },
    {
      "epoch": 0.033934252386002124,
      "grad_norm": 10.951172828674316,
      "learning_rate": 1.3333333333333334e-06,
      "loss": 0.765,
      "step": 8
    },
    {
      "epoch": 0.03817603393425239,
      "grad_norm": 4.923064231872559,
      "learning_rate": 1.5e-06,
      "loss": 0.6751,
      "step": 9
    },
    {
      "epoch": 0.042417815482502653,
      "grad_norm": 4.684195518493652,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 0.7466,
      "step": 10
    },
    {
      "epoch": 0.04665959703075292,
      "grad_norm": 9.586348533630371,
      "learning_rate": 1.8333333333333333e-06,
      "loss": 0.8434,
      "step": 11
    },
    {
      "epoch": 0.05090137857900318,
      "grad_norm": 15.056419372558594,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.0154,
      "step": 12
    },
    {
      "epoch": 0.05514316012725345,
      "grad_norm": 6.797557830810547,
      "learning_rate": 2.166666666666667e-06,
      "loss": 0.8514,
      "step": 13
    },
    {
      "epoch": 0.05938494167550371,
      "grad_norm": 3.789074659347534,
      "learning_rate": 2.3333333333333336e-06,
      "loss": 0.7579,
      "step": 14
    },
    {
      "epoch": 0.06362672322375397,
      "grad_norm": 4.713261604309082,
      "learning_rate": 2.5e-06,
      "loss": 0.8242,
      "step": 15
    },
    {
      "epoch": 0.06786850477200425,
      "grad_norm": 3.963026285171509,
      "learning_rate": 2.666666666666667e-06,
      "loss": 0.8754,
      "step": 16
    },
    {
      "epoch": 0.07211028632025451,
      "grad_norm": 3.974591016769409,
      "learning_rate": 2.8333333333333335e-06,
      "loss": 0.9025,
      "step": 17
    },
    {
      "epoch": 0.07635206786850478,
      "grad_norm": 4.688375473022461,
      "learning_rate": 3e-06,
      "loss": 1.0425,
      "step": 18
    },
    {
      "epoch": 0.08059384941675504,
      "grad_norm": 5.0737690925598145,
      "learning_rate": 3.1666666666666667e-06,
      "loss": 1.2042,
      "step": 19
    },
    {
      "epoch": 0.08483563096500531,
      "grad_norm": 8.627097129821777,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.7189,
      "step": 20
    },
    {
      "epoch": 0.08907741251325557,
      "grad_norm": 11.974156379699707,
      "learning_rate": 3.5e-06,
      "loss": 1.9111,
      "step": 21
    },
    {
      "epoch": 0.09331919406150584,
      "grad_norm": 13.727147102355957,
      "learning_rate": 3.6666666666666666e-06,
      "loss": 2.2629,
      "step": 22
    },
    {
      "epoch": 0.0975609756097561,
      "grad_norm": 19.623498916625977,
      "learning_rate": 3.833333333333334e-06,
      "loss": 2.8472,
      "step": 23
    },
    {
      "epoch": 0.10180275715800637,
      "grad_norm": 17.45310401916504,
      "learning_rate": 4.000000000000001e-06,
      "loss": 2.7759,
      "step": 24
    },
    {
      "epoch": 0.10604453870625663,
      "grad_norm": 22.359373092651367,
      "learning_rate": 4.166666666666667e-06,
      "loss": 2.3388,
      "step": 25
    },
    {
      "epoch": 0.1102863202545069,
      "grad_norm": 13.022788047790527,
      "learning_rate": 4.333333333333334e-06,
      "loss": 2.2261,
      "step": 26
    },
    {
      "epoch": 0.11452810180275716,
      "grad_norm": 18.914791107177734,
      "learning_rate": 4.5e-06,
      "loss": 2.9993,
      "step": 27
    },
    {
      "epoch": 0.11876988335100742,
      "grad_norm": 18.410812377929688,
      "learning_rate": 4.666666666666667e-06,
      "loss": 2.4895,
      "step": 28
    },
    {
      "epoch": 0.12301166489925769,
      "grad_norm": 14.935495376586914,
      "learning_rate": 4.833333333333333e-06,
      "loss": 2.3596,
      "step": 29
    },
    {
      "epoch": 0.12725344644750794,
      "grad_norm": 17.543804168701172,
      "learning_rate": 5e-06,
      "loss": 2.7266,
      "step": 30
    },
    {
      "epoch": 0.13149522799575822,
      "grad_norm": 15.843499183654785,
      "learning_rate": 4.997482666353287e-06,
      "loss": 2.292,
      "step": 31
    },
    {
      "epoch": 0.1357370095440085,
      "grad_norm": 16.629941940307617,
      "learning_rate": 4.989935734988098e-06,
      "loss": 2.4197,
      "step": 32
    },
    {
      "epoch": 0.13997879109225875,
      "grad_norm": 15.72042465209961,
      "learning_rate": 4.977374404419838e-06,
      "loss": 2.3473,
      "step": 33
    },
    {
      "epoch": 0.14422057264050903,
      "grad_norm": 13.605842590332031,
      "learning_rate": 4.959823971496575e-06,
      "loss": 2.0056,
      "step": 34
    },
    {
      "epoch": 0.14846235418875928,
      "grad_norm": 11.888983726501465,
      "learning_rate": 4.937319780454559e-06,
      "loss": 1.7911,
      "step": 35
    },
    {
      "epoch": 0.15270413573700956,
      "grad_norm": 18.208675384521484,
      "learning_rate": 4.909907151739634e-06,
      "loss": 2.2271,
      "step": 36
    },
    {
      "epoch": 0.1569459172852598,
      "grad_norm": 18.528289794921875,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 2.0293,
      "step": 37
    },
    {
      "epoch": 0.16118769883351008,
      "grad_norm": 17.24335479736328,
      "learning_rate": 4.8405871765993435e-06,
      "loss": 2.087,
      "step": 38
    },
    {
      "epoch": 0.16542948038176034,
      "grad_norm": 14.06977367401123,
      "learning_rate": 4.7988194313786275e-06,
      "loss": 1.897,
      "step": 39
    },
    {
      "epoch": 0.16967126193001061,
      "grad_norm": 21.608551025390625,
      "learning_rate": 4.752422169756048e-06,
      "loss": 1.8175,
      "step": 40
    },
    {
      "epoch": 0.17391304347826086,
      "grad_norm": 16.8688907623291,
      "learning_rate": 4.701488829641845e-06,
      "loss": 1.8707,
      "step": 41
    },
    {
      "epoch": 0.17815482502651114,
      "grad_norm": 13.121498107910156,
      "learning_rate": 4.646121984004666e-06,
      "loss": 1.6978,
      "step": 42
    },
    {
      "epoch": 0.1823966065747614,
      "grad_norm": 13.48686408996582,
      "learning_rate": 4.586433134303257e-06,
      "loss": 1.6308,
      "step": 43
    },
    {
      "epoch": 0.18663838812301167,
      "grad_norm": 14.717817306518555,
      "learning_rate": 4.522542485937369e-06,
      "loss": 1.9161,
      "step": 44
    },
    {
      "epoch": 0.19088016967126192,
      "grad_norm": 14.173543930053711,
      "learning_rate": 4.454578706170075e-06,
      "loss": 1.3766,
      "step": 45
    },
    {
      "epoch": 0.1951219512195122,
      "grad_norm": 15.915189743041992,
      "learning_rate": 4.382678665009028e-06,
      "loss": 1.4068,
      "step": 46
    },
    {
      "epoch": 0.19936373276776245,
      "grad_norm": 12.305916786193848,
      "learning_rate": 4.3069871595684795e-06,
      "loss": 1.1608,
      "step": 47
    },
    {
      "epoch": 0.20360551431601273,
      "grad_norm": 21.32429313659668,
      "learning_rate": 4.227656622467162e-06,
      "loss": 1.3466,
      "step": 48
    },
    {
      "epoch": 0.20784729586426298,
      "grad_norm": 17.543088912963867,
      "learning_rate": 4.144846814849282e-06,
      "loss": 1.2793,
      "step": 49
    },
    {
      "epoch": 0.21208907741251326,
      "grad_norm": 29.9601993560791,
      "learning_rate": 4.058724504646834e-06,
      "loss": 1.4011,
      "step": 50
    },
    {
      "epoch": 0.21208907741251326,
      "eval_loss": 0.8299828171730042,
      "eval_runtime": 8.0354,
      "eval_samples_per_second": 49.406,
      "eval_steps_per_second": 6.222,
      "step": 50
    },
    {
      "epoch": 0.2163308589607635,
      "grad_norm": 1.2941735982894897,
      "learning_rate": 3.969463130731183e-06,
      "loss": 0.2063,
      "step": 51
    },
    {
      "epoch": 0.2205726405090138,
      "grad_norm": 1.8185893297195435,
      "learning_rate": 3.8772424536302565e-06,
      "loss": 0.2909,
      "step": 52
    },
    {
      "epoch": 0.22481442205726404,
      "grad_norm": 2.4603350162506104,
      "learning_rate": 3.782248193514766e-06,
      "loss": 0.297,
      "step": 53
    },
    {
      "epoch": 0.22905620360551432,
      "grad_norm": 2.465657949447632,
      "learning_rate": 3.684671656182497e-06,
      "loss": 0.3119,
      "step": 54
    },
    {
      "epoch": 0.23329798515376457,
      "grad_norm": 2.5236356258392334,
      "learning_rate": 3.5847093477938955e-06,
      "loss": 0.2771,
      "step": 55
    },
    {
      "epoch": 0.23753976670201485,
      "grad_norm": 3.519874095916748,
      "learning_rate": 3.4825625791348093e-06,
      "loss": 0.3052,
      "step": 56
    },
    {
      "epoch": 0.2417815482502651,
      "grad_norm": 2.217327356338501,
      "learning_rate": 3.3784370602033572e-06,
      "loss": 0.2883,
      "step": 57
    },
    {
      "epoch": 0.24602332979851538,
      "grad_norm": 2.3429453372955322,
      "learning_rate": 3.272542485937369e-06,
      "loss": 0.3006,
      "step": 58
    },
    {
      "epoch": 0.25026511134676566,
      "grad_norm": 3.1694295406341553,
      "learning_rate": 3.165092113916688e-06,
      "loss": 0.3607,
      "step": 59
    },
    {
      "epoch": 0.2545068928950159,
      "grad_norm": 2.1960690021514893,
      "learning_rate": 3.056302334890786e-06,
      "loss": 0.2782,
      "step": 60
    },
    {
      "epoch": 0.25874867444326616,
      "grad_norm": 2.5143775939941406,
      "learning_rate": 2.946392236996592e-06,
      "loss": 0.327,
      "step": 61
    },
    {
      "epoch": 0.26299045599151644,
      "grad_norm": 2.0972142219543457,
      "learning_rate": 2.835583164544139e-06,
      "loss": 0.2211,
      "step": 62
    },
    {
      "epoch": 0.2672322375397667,
      "grad_norm": 1.8701742887496948,
      "learning_rate": 2.724098272258584e-06,
      "loss": 0.2287,
      "step": 63
    },
    {
      "epoch": 0.271474019088017,
      "grad_norm": 2.386495351791382,
      "learning_rate": 2.6121620758762877e-06,
      "loss": 0.2951,
      "step": 64
    },
    {
      "epoch": 0.2757158006362672,
      "grad_norm": 2.3010129928588867,
      "learning_rate": 2.5e-06,
      "loss": 0.2169,
      "step": 65
    },
    {
      "epoch": 0.2799575821845175,
      "grad_norm": 2.422025442123413,
      "learning_rate": 2.3878379241237136e-06,
      "loss": 0.2676,
      "step": 66
    },
    {
      "epoch": 0.2841993637327678,
      "grad_norm": 2.306940793991089,
      "learning_rate": 2.2759017277414165e-06,
      "loss": 0.2568,
      "step": 67
    },
    {
      "epoch": 0.28844114528101805,
      "grad_norm": 2.8453123569488525,
      "learning_rate": 2.1644168354558623e-06,
      "loss": 0.3003,
      "step": 68
    },
    {
      "epoch": 0.2926829268292683,
      "grad_norm": 3.1190452575683594,
      "learning_rate": 2.053607763003409e-06,
      "loss": 0.3167,
      "step": 69
    },
    {
      "epoch": 0.29692470837751855,
      "grad_norm": 4.037975788116455,
      "learning_rate": 1.9436976651092143e-06,
      "loss": 0.4344,
      "step": 70
    },
    {
      "epoch": 0.30116648992576883,
      "grad_norm": 4.7273454666137695,
      "learning_rate": 1.8349078860833125e-06,
      "loss": 0.4892,
      "step": 71
    },
    {
      "epoch": 0.3054082714740191,
      "grad_norm": 8.596299171447754,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 0.7955,
      "step": 72
    },
    {
      "epoch": 0.30965005302226933,
      "grad_norm": 6.555408954620361,
      "learning_rate": 1.6215629397966432e-06,
      "loss": 0.7031,
      "step": 73
    },
    {
      "epoch": 0.3138918345705196,
      "grad_norm": 7.657025337219238,
      "learning_rate": 1.5174374208651913e-06,
      "loss": 0.6821,
      "step": 74
    },
    {
      "epoch": 0.3181336161187699,
      "grad_norm": 7.3240227699279785,
      "learning_rate": 1.415290652206105e-06,
      "loss": 0.7677,
      "step": 75
    },
    {
      "epoch": 0.32237539766702017,
      "grad_norm": 9.968438148498535,
      "learning_rate": 1.3153283438175036e-06,
      "loss": 0.6359,
      "step": 76
    },
    {
      "epoch": 0.3266171792152704,
      "grad_norm": 7.8202338218688965,
      "learning_rate": 1.217751806485235e-06,
      "loss": 0.6877,
      "step": 77
    },
    {
      "epoch": 0.33085896076352067,
      "grad_norm": 7.387623310089111,
      "learning_rate": 1.122757546369744e-06,
      "loss": 0.6521,
      "step": 78
    },
    {
      "epoch": 0.33510074231177095,
      "grad_norm": 8.559468269348145,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 0.7261,
      "step": 79
    },
    {
      "epoch": 0.33934252386002123,
      "grad_norm": 7.285356044769287,
      "learning_rate": 9.412754953531664e-07,
      "loss": 0.6358,
      "step": 80
    },
    {
      "epoch": 0.34358430540827145,
      "grad_norm": 9.018921852111816,
      "learning_rate": 8.551531851507186e-07,
      "loss": 0.6657,
      "step": 81
    },
    {
      "epoch": 0.34782608695652173,
      "grad_norm": 5.554042816162109,
      "learning_rate": 7.723433775328385e-07,
      "loss": 0.5567,
      "step": 82
    },
    {
      "epoch": 0.352067868504772,
      "grad_norm": 11.054597854614258,
      "learning_rate": 6.930128404315214e-07,
      "loss": 0.8462,
      "step": 83
    },
    {
      "epoch": 0.3563096500530223,
      "grad_norm": 6.244119644165039,
      "learning_rate": 6.17321334990973e-07,
      "loss": 0.6058,
      "step": 84
    },
    {
      "epoch": 0.3605514316012725,
      "grad_norm": 10.842486381530762,
      "learning_rate": 5.454212938299256e-07,
      "loss": 0.7793,
      "step": 85
    },
    {
      "epoch": 0.3647932131495228,
      "grad_norm": 7.651045322418213,
      "learning_rate": 4.774575140626317e-07,
      "loss": 0.5938,
      "step": 86
    },
    {
      "epoch": 0.36903499469777307,
      "grad_norm": 10.755373001098633,
      "learning_rate": 4.1356686569674344e-07,
      "loss": 0.6908,
      "step": 87
    },
    {
      "epoch": 0.37327677624602335,
      "grad_norm": 8.134658813476562,
      "learning_rate": 3.538780159953348e-07,
      "loss": 0.7198,
      "step": 88
    },
    {
      "epoch": 0.37751855779427357,
      "grad_norm": 8.481114387512207,
      "learning_rate": 2.98511170358155e-07,
      "loss": 0.7024,
      "step": 89
    },
    {
      "epoch": 0.38176033934252385,
      "grad_norm": 9.80128288269043,
      "learning_rate": 2.4757783024395244e-07,
      "loss": 0.6167,
      "step": 90
    },
    {
      "epoch": 0.3860021208907741,
      "grad_norm": 11.155387878417969,
      "learning_rate": 2.0118056862137358e-07,
      "loss": 0.8483,
      "step": 91
    },
    {
      "epoch": 0.3902439024390244,
      "grad_norm": 7.825685501098633,
      "learning_rate": 1.59412823400657e-07,
      "loss": 0.6448,
      "step": 92
    },
    {
      "epoch": 0.3944856839872747,
      "grad_norm": 7.912622928619385,
      "learning_rate": 1.223587092621162e-07,
      "loss": 0.6102,
      "step": 93
    },
    {
      "epoch": 0.3987274655355249,
      "grad_norm": 11.073833465576172,
      "learning_rate": 9.00928482603669e-08,
      "loss": 0.562,
      "step": 94
    },
    {
      "epoch": 0.4029692470837752,
      "grad_norm": 7.491645336151123,
      "learning_rate": 6.268021954544095e-08,
      "loss": 0.4042,
      "step": 95
    },
    {
      "epoch": 0.40721102863202546,
      "grad_norm": 9.142674446105957,
      "learning_rate": 4.017602850342584e-08,
      "loss": 0.4681,
      "step": 96
    },
    {
      "epoch": 0.41145281018027574,
      "grad_norm": 15.355952262878418,
      "learning_rate": 2.262559558016325e-08,
      "loss": 0.7407,
      "step": 97
    },
    {
      "epoch": 0.41569459172852596,
      "grad_norm": 13.96410083770752,
      "learning_rate": 1.006426501190233e-08,
      "loss": 0.5351,
      "step": 98
    },
    {
      "epoch": 0.41993637327677624,
      "grad_norm": 29.766155242919922,
      "learning_rate": 2.5173336467135266e-09,
      "loss": 0.7461,
      "step": 99
    },
    {
      "epoch": 0.4241781548250265,
      "grad_norm": 28.113073348999023,
      "learning_rate": 0.0,
      "loss": 0.8913,
      "step": 100
    },
    {
      "epoch": 0.4241781548250265,
      "eval_loss": 0.42480990290641785,
      "eval_runtime": 8.0387,
      "eval_samples_per_second": 49.386,
      "eval_steps_per_second": 6.22,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.13314785247232e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}