{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.6134969325153374,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006134969325153374,
      "grad_norm": 88.35982513427734,
      "learning_rate": 5e-06,
      "loss": 2.5293,
      "step": 1
    },
    {
      "epoch": 0.012269938650306749,
      "grad_norm": 71.40210723876953,
      "learning_rate": 1e-05,
      "loss": 2.6329,
      "step": 2
    },
    {
      "epoch": 0.018404907975460124,
      "grad_norm": 82.39716339111328,
      "learning_rate": 9.89795918367347e-06,
      "loss": 2.7434,
      "step": 3
    },
    {
      "epoch": 0.024539877300613498,
      "grad_norm": 35.27679443359375,
      "learning_rate": 9.795918367346939e-06,
      "loss": 1.7051,
      "step": 4
    },
    {
      "epoch": 0.03067484662576687,
      "grad_norm": 44.54578399658203,
      "learning_rate": 9.693877551020408e-06,
      "loss": 1.8552,
      "step": 5
    },
    {
      "epoch": 0.03680981595092025,
      "grad_norm": 66.71598052978516,
      "learning_rate": 9.591836734693878e-06,
      "loss": 2.3346,
      "step": 6
    },
    {
      "epoch": 0.04294478527607362,
      "grad_norm": 59.80192184448242,
      "learning_rate": 9.489795918367348e-06,
      "loss": 2.1256,
      "step": 7
    },
    {
      "epoch": 0.049079754601226995,
      "grad_norm": 48.737754821777344,
      "learning_rate": 9.387755102040818e-06,
      "loss": 1.7364,
      "step": 8
    },
    {
      "epoch": 0.05521472392638037,
      "grad_norm": 28.469051361083984,
      "learning_rate": 9.285714285714288e-06,
      "loss": 1.9437,
      "step": 9
    },
    {
      "epoch": 0.06134969325153374,
      "grad_norm": 54.786128997802734,
      "learning_rate": 9.183673469387756e-06,
      "loss": 3.0007,
      "step": 10
    },
    {
      "epoch": 0.06748466257668712,
      "grad_norm": 64.78273010253906,
      "learning_rate": 9.081632653061225e-06,
      "loss": 1.8778,
      "step": 11
    },
    {
      "epoch": 0.0736196319018405,
      "grad_norm": 50.23028564453125,
      "learning_rate": 8.979591836734695e-06,
      "loss": 1.8922,
      "step": 12
    },
    {
      "epoch": 0.07975460122699386,
      "grad_norm": 64.22184753417969,
      "learning_rate": 8.877551020408163e-06,
      "loss": 2.0061,
      "step": 13
    },
    {
      "epoch": 0.08588957055214724,
      "grad_norm": 30.814348220825195,
      "learning_rate": 8.775510204081633e-06,
      "loss": 1.6799,
      "step": 14
    },
    {
      "epoch": 0.09202453987730061,
      "grad_norm": 33.7660026550293,
      "learning_rate": 8.673469387755103e-06,
      "loss": 1.8202,
      "step": 15
    },
    {
      "epoch": 0.09815950920245399,
      "grad_norm": 19.061237335205078,
      "learning_rate": 8.571428571428571e-06,
      "loss": 1.5798,
      "step": 16
    },
    {
      "epoch": 0.10429447852760736,
      "grad_norm": 34.668060302734375,
      "learning_rate": 8.469387755102042e-06,
      "loss": 1.5905,
      "step": 17
    },
    {
      "epoch": 0.11042944785276074,
      "grad_norm": 32.6378059387207,
      "learning_rate": 8.36734693877551e-06,
      "loss": 1.6961,
      "step": 18
    },
    {
      "epoch": 0.1165644171779141,
      "grad_norm": 37.90195083618164,
      "learning_rate": 8.26530612244898e-06,
      "loss": 1.7675,
      "step": 19
    },
    {
      "epoch": 0.12269938650306748,
      "grad_norm": 50.06489562988281,
      "learning_rate": 8.16326530612245e-06,
      "loss": 1.4142,
      "step": 20
    },
    {
      "epoch": 0.12883435582822086,
      "grad_norm": 50.70490264892578,
      "learning_rate": 8.06122448979592e-06,
      "loss": 1.4977,
      "step": 21
    },
    {
      "epoch": 0.13496932515337423,
      "grad_norm": 32.71562576293945,
      "learning_rate": 7.959183673469388e-06,
      "loss": 1.3493,
      "step": 22
    },
    {
      "epoch": 0.1411042944785276,
      "grad_norm": 36.24839782714844,
      "learning_rate": 7.857142857142858e-06,
      "loss": 1.5274,
      "step": 23
    },
    {
      "epoch": 0.147239263803681,
      "grad_norm": 40.0679817199707,
      "learning_rate": 7.755102040816327e-06,
      "loss": 2.0856,
      "step": 24
    },
    {
      "epoch": 0.15337423312883436,
      "grad_norm": 64.05142211914062,
      "learning_rate": 7.653061224489796e-06,
      "loss": 2.0254,
      "step": 25
    },
    {
      "epoch": 0.15950920245398773,
      "grad_norm": 35.79407501220703,
      "learning_rate": 7.551020408163265e-06,
      "loss": 1.8358,
      "step": 26
    },
    {
      "epoch": 0.1656441717791411,
      "grad_norm": 40.65586471557617,
      "learning_rate": 7.448979591836736e-06,
      "loss": 2.43,
      "step": 27
    },
    {
      "epoch": 0.17177914110429449,
      "grad_norm": 45.51654052734375,
      "learning_rate": 7.346938775510205e-06,
      "loss": 1.5213,
      "step": 28
    },
    {
      "epoch": 0.17791411042944785,
      "grad_norm": 18.54694175720215,
      "learning_rate": 7.244897959183675e-06,
      "loss": 1.7857,
      "step": 29
    },
    {
      "epoch": 0.18404907975460122,
      "grad_norm": 14.598164558410645,
      "learning_rate": 7.1428571428571436e-06,
      "loss": 0.6952,
      "step": 30
    },
    {
      "epoch": 0.1901840490797546,
      "grad_norm": 32.71424865722656,
      "learning_rate": 7.0408163265306125e-06,
      "loss": 2.0721,
      "step": 31
    },
    {
      "epoch": 0.19631901840490798,
      "grad_norm": 32.908966064453125,
      "learning_rate": 6.938775510204082e-06,
      "loss": 2.0366,
      "step": 32
    },
    {
      "epoch": 0.20245398773006135,
      "grad_norm": 20.541730880737305,
      "learning_rate": 6.836734693877551e-06,
      "loss": 1.9455,
      "step": 33
    },
    {
      "epoch": 0.2085889570552147,
      "grad_norm": 33.4826545715332,
      "learning_rate": 6.734693877551021e-06,
      "loss": 1.875,
      "step": 34
    },
    {
      "epoch": 0.2147239263803681,
      "grad_norm": 27.954130172729492,
      "learning_rate": 6.63265306122449e-06,
      "loss": 1.5669,
      "step": 35
    },
    {
      "epoch": 0.22085889570552147,
      "grad_norm": 28.17574119567871,
      "learning_rate": 6.530612244897959e-06,
      "loss": 0.7674,
      "step": 36
    },
    {
      "epoch": 0.22699386503067484,
      "grad_norm": 27.853933334350586,
      "learning_rate": 6.4285714285714295e-06,
      "loss": 1.5963,
      "step": 37
    },
    {
      "epoch": 0.2331288343558282,
      "grad_norm": 19.903644561767578,
      "learning_rate": 6.326530612244899e-06,
      "loss": 0.5877,
      "step": 38
    },
    {
      "epoch": 0.2392638036809816,
      "grad_norm": 35.6341552734375,
      "learning_rate": 6.224489795918368e-06,
      "loss": 2.0134,
      "step": 39
    },
    {
      "epoch": 0.24539877300613497,
      "grad_norm": 35.47877502441406,
      "learning_rate": 6.122448979591837e-06,
      "loss": 1.9894,
      "step": 40
    },
    {
      "epoch": 0.25153374233128833,
      "grad_norm": 28.387544631958008,
      "learning_rate": 6.020408163265307e-06,
      "loss": 1.5612,
      "step": 41
    },
    {
      "epoch": 0.25766871165644173,
      "grad_norm": 16.537662506103516,
      "learning_rate": 5.918367346938776e-06,
      "loss": 0.5503,
      "step": 42
    },
    {
      "epoch": 0.26380368098159507,
      "grad_norm": 19.7191162109375,
      "learning_rate": 5.816326530612246e-06,
      "loss": 1.5554,
      "step": 43
    },
    {
      "epoch": 0.26993865030674846,
      "grad_norm": 28.564775466918945,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 2.0555,
      "step": 44
    },
    {
      "epoch": 0.27607361963190186,
      "grad_norm": 37.1331672668457,
      "learning_rate": 5.6122448979591834e-06,
      "loss": 2.2492,
      "step": 45
    },
    {
      "epoch": 0.2822085889570552,
      "grad_norm": 40.33460998535156,
      "learning_rate": 5.510204081632653e-06,
      "loss": 2.8251,
      "step": 46
    },
    {
      "epoch": 0.2883435582822086,
      "grad_norm": 36.697486877441406,
      "learning_rate": 5.408163265306123e-06,
      "loss": 0.6507,
      "step": 47
    },
    {
      "epoch": 0.294478527607362,
      "grad_norm": 38.03018569946289,
      "learning_rate": 5.306122448979593e-06,
      "loss": 1.335,
      "step": 48
    },
    {
      "epoch": 0.3006134969325153,
      "grad_norm": 34.11387634277344,
      "learning_rate": 5.204081632653062e-06,
      "loss": 2.0543,
      "step": 49
    },
    {
      "epoch": 0.3067484662576687,
      "grad_norm": 33.1131706237793,
      "learning_rate": 5.1020408163265315e-06,
      "loss": 2.1334,
      "step": 50
    },
    {
      "epoch": 0.3128834355828221,
      "grad_norm": 27.1938419342041,
      "learning_rate": 5e-06,
      "loss": 1.4209,
      "step": 51
    },
    {
      "epoch": 0.31901840490797545,
      "grad_norm": 27.597105026245117,
      "learning_rate": 4.897959183673469e-06,
      "loss": 1.5301,
      "step": 52
    },
    {
      "epoch": 0.32515337423312884,
      "grad_norm": 48.28635787963867,
      "learning_rate": 4.795918367346939e-06,
      "loss": 1.8728,
      "step": 53
    },
    {
      "epoch": 0.3312883435582822,
      "grad_norm": 29.6666316986084,
      "learning_rate": 4.693877551020409e-06,
      "loss": 1.7786,
      "step": 54
    },
    {
      "epoch": 0.3374233128834356,
      "grad_norm": 24.084613800048828,
      "learning_rate": 4.591836734693878e-06,
      "loss": 1.3896,
      "step": 55
    },
    {
      "epoch": 0.34355828220858897,
      "grad_norm": 29.24120330810547,
      "learning_rate": 4.489795918367348e-06,
      "loss": 1.2668,
      "step": 56
    },
    {
      "epoch": 0.3496932515337423,
      "grad_norm": 20.895090103149414,
      "learning_rate": 4.3877551020408165e-06,
      "loss": 1.8259,
      "step": 57
    },
    {
      "epoch": 0.3558282208588957,
      "grad_norm": 28.952999114990234,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 1.7763,
      "step": 58
    },
    {
      "epoch": 0.3619631901840491,
      "grad_norm": 19.181821823120117,
      "learning_rate": 4.183673469387755e-06,
      "loss": 1.9193,
      "step": 59
    },
    {
      "epoch": 0.36809815950920244,
      "grad_norm": 22.564476013183594,
      "learning_rate": 4.081632653061225e-06,
      "loss": 1.1478,
      "step": 60
    },
    {
      "epoch": 0.37423312883435583,
      "grad_norm": 27.566129684448242,
      "learning_rate": 3.979591836734694e-06,
      "loss": 1.5727,
      "step": 61
    },
    {
      "epoch": 0.3803680981595092,
      "grad_norm": 29.382600784301758,
      "learning_rate": 3.877551020408164e-06,
      "loss": 1.7253,
      "step": 62
    },
    {
      "epoch": 0.38650306748466257,
      "grad_norm": 18.100589752197266,
      "learning_rate": 3.7755102040816327e-06,
      "loss": 1.8991,
      "step": 63
    },
    {
      "epoch": 0.39263803680981596,
      "grad_norm": 28.909286499023438,
      "learning_rate": 3.6734693877551024e-06,
      "loss": 1.3451,
      "step": 64
    },
    {
      "epoch": 0.3987730061349693,
      "grad_norm": 22.60663414001465,
      "learning_rate": 3.5714285714285718e-06,
      "loss": 1.4336,
      "step": 65
    },
    {
      "epoch": 0.4049079754601227,
      "grad_norm": 18.791940689086914,
      "learning_rate": 3.469387755102041e-06,
      "loss": 1.056,
      "step": 66
    },
    {
      "epoch": 0.4110429447852761,
      "grad_norm": 27.81426239013672,
      "learning_rate": 3.3673469387755105e-06,
      "loss": 1.6418,
      "step": 67
    },
    {
      "epoch": 0.4171779141104294,
      "grad_norm": 48.35829162597656,
      "learning_rate": 3.2653061224489794e-06,
      "loss": 2.4501,
      "step": 68
    },
    {
      "epoch": 0.4233128834355828,
      "grad_norm": 27.157045364379883,
      "learning_rate": 3.1632653061224496e-06,
      "loss": 1.5875,
      "step": 69
    },
    {
      "epoch": 0.4294478527607362,
      "grad_norm": 30.503982543945312,
      "learning_rate": 3.0612244897959185e-06,
      "loss": 1.3503,
      "step": 70
    },
    {
      "epoch": 0.43558282208588955,
      "grad_norm": 23.582530975341797,
      "learning_rate": 2.959183673469388e-06,
      "loss": 1.3834,
      "step": 71
    },
    {
      "epoch": 0.44171779141104295,
      "grad_norm": 28.846086502075195,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 1.7887,
      "step": 72
    },
    {
      "epoch": 0.44785276073619634,
      "grad_norm": 14.605477333068848,
      "learning_rate": 2.7551020408163266e-06,
      "loss": 0.979,
      "step": 73
    },
    {
      "epoch": 0.4539877300613497,
      "grad_norm": 25.760046005249023,
      "learning_rate": 2.6530612244897964e-06,
      "loss": 1.5663,
      "step": 74
    },
    {
      "epoch": 0.4601226993865031,
      "grad_norm": 18.653833389282227,
      "learning_rate": 2.5510204081632657e-06,
      "loss": 1.7759,
      "step": 75
    },
    {
      "epoch": 0.4662576687116564,
      "grad_norm": 27.419715881347656,
      "learning_rate": 2.4489795918367347e-06,
      "loss": 2.4035,
      "step": 76
    },
    {
      "epoch": 0.4723926380368098,
      "grad_norm": 27.352413177490234,
      "learning_rate": 2.3469387755102044e-06,
      "loss": 1.1448,
      "step": 77
    },
    {
      "epoch": 0.4785276073619632,
      "grad_norm": 40.72161102294922,
      "learning_rate": 2.244897959183674e-06,
      "loss": 1.8509,
      "step": 78
    },
    {
      "epoch": 0.48466257668711654,
      "grad_norm": 33.176212310791016,
      "learning_rate": 2.1428571428571427e-06,
      "loss": 1.8893,
      "step": 79
    },
    {
      "epoch": 0.49079754601226994,
      "grad_norm": 29.56044578552246,
      "learning_rate": 2.0408163265306125e-06,
      "loss": 2.131,
      "step": 80
    },
    {
      "epoch": 0.49693251533742333,
      "grad_norm": 23.454936981201172,
      "learning_rate": 1.938775510204082e-06,
      "loss": 1.4972,
      "step": 81
    },
    {
      "epoch": 0.5030674846625767,
      "grad_norm": 36.35444259643555,
      "learning_rate": 1.8367346938775512e-06,
      "loss": 1.6202,
      "step": 82
    },
    {
      "epoch": 0.50920245398773,
      "grad_norm": 27.361839294433594,
      "learning_rate": 1.7346938775510206e-06,
      "loss": 1.7515,
      "step": 83
    },
    {
      "epoch": 0.5153374233128835,
      "grad_norm": 40.22319793701172,
      "learning_rate": 1.6326530612244897e-06,
      "loss": 1.7665,
      "step": 84
    },
    {
      "epoch": 0.5214723926380368,
      "grad_norm": 24.91071891784668,
      "learning_rate": 1.5306122448979593e-06,
      "loss": 1.7337,
      "step": 85
    },
    {
      "epoch": 0.5276073619631901,
      "grad_norm": 21.369565963745117,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 2.279,
      "step": 86
    },
    {
      "epoch": 0.5337423312883436,
      "grad_norm": 23.668081283569336,
      "learning_rate": 1.3265306122448982e-06,
      "loss": 1.07,
      "step": 87
    },
    {
      "epoch": 0.5398773006134969,
      "grad_norm": 25.3243408203125,
      "learning_rate": 1.2244897959183673e-06,
      "loss": 1.7643,
      "step": 88
    },
    {
      "epoch": 0.5460122699386503,
      "grad_norm": 24.15099334716797,
      "learning_rate": 1.122448979591837e-06,
      "loss": 1.4947,
      "step": 89
    },
    {
      "epoch": 0.5521472392638037,
      "grad_norm": 16.557153701782227,
      "learning_rate": 1.0204081632653063e-06,
      "loss": 1.5758,
      "step": 90
    },
    {
      "epoch": 0.558282208588957,
      "grad_norm": 25.761274337768555,
      "learning_rate": 9.183673469387756e-07,
      "loss": 1.8561,
      "step": 91
    },
    {
      "epoch": 0.5644171779141104,
      "grad_norm": 26.440319061279297,
      "learning_rate": 8.163265306122449e-07,
      "loss": 1.6105,
      "step": 92
    },
    {
      "epoch": 0.5705521472392638,
      "grad_norm": 22.7464542388916,
      "learning_rate": 7.142857142857143e-07,
      "loss": 1.1884,
      "step": 93
    },
    {
      "epoch": 0.5766871165644172,
      "grad_norm": 34.55144500732422,
      "learning_rate": 6.122448979591837e-07,
      "loss": 1.7501,
      "step": 94
    },
    {
      "epoch": 0.5828220858895705,
      "grad_norm": 35.58631896972656,
      "learning_rate": 5.102040816326531e-07,
      "loss": 1.3714,
      "step": 95
    },
    {
      "epoch": 0.588957055214724,
      "grad_norm": 23.65045738220215,
      "learning_rate": 4.0816326530612243e-07,
      "loss": 1.3916,
      "step": 96
    },
    {
      "epoch": 0.5950920245398773,
      "grad_norm": 38.99002456665039,
      "learning_rate": 3.0612244897959183e-07,
      "loss": 1.7839,
      "step": 97
    },
    {
      "epoch": 0.6012269938650306,
      "grad_norm": 21.398700714111328,
      "learning_rate": 2.0408163265306121e-07,
      "loss": 0.9391,
      "step": 98
    },
    {
      "epoch": 0.6073619631901841,
      "grad_norm": 21.622156143188477,
      "learning_rate": 1.0204081632653061e-07,
      "loss": 1.4566,
      "step": 99
    },
    {
      "epoch": 0.6134969325153374,
      "grad_norm": 47.138668060302734,
      "learning_rate": 0.0,
      "loss": 2.9306,
      "step": 100
    },
    {
      "epoch": 0.6134969325153374,
      "step": 100,
      "total_flos": 2305515375820800.0,
      "train_loss": 1.7183861404657363,
      "train_runtime": 6171.3384,
      "train_samples_per_second": 0.016,
      "train_steps_per_second": 0.016
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2305515375820800.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}